From eaaa66cd368f4bfe6896e556b87637478bbf6874 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 20 Sep 2021 17:52:53 +0300 Subject: [PATCH 01/69] RE preparations --- .../field_level/fieldLevelConfig.json | 26 +- reverse_engineering/api.js | 269 +- .../helpers/connectionHelper.js | 246 - reverse_engineering/helpers/mariadbHelper.js | 310 - .../helpers/parsers/functionHelper.js | 92 - .../helpers/parsers/procedureHelper.js | 90 - reverse_engineering/node_modules/.bin/semver | 1 + reverse_engineering/node_modules/asn1/LICENSE | 19 + .../node_modules/asn1/README.md | 50 + .../node_modules/asn1/lib/ber/errors.js | 13 + .../node_modules/asn1/lib/ber/index.js | 27 + .../node_modules/asn1/lib/ber/reader.js | 262 + .../node_modules/asn1/lib/ber/types.js | 36 + .../node_modules/asn1/lib/ber/writer.js | 317 + .../node_modules/asn1/lib/index.js | 20 + .../node_modules/asn1/package.json | 78 + .../node_modules/buffer-writer/.travis.yml | 7 + .../node_modules/buffer-writer/LICENSE | 19 + .../node_modules/buffer-writer/README.md | 48 + .../node_modules/buffer-writer/index.js | 129 + .../node_modules/buffer-writer/package.json | 57 + .../buffer-writer/test/mocha.opts | 1 + .../buffer-writer/test/writer-tests.js | 218 + .../node_modules/debug/.coveralls.yml | 1 + .../node_modules/debug/.eslintrc | 11 + .../node_modules/debug/.npmignore | 9 + .../node_modules/debug/.travis.yml | 14 + .../node_modules/debug/CHANGELOG.md | 362 ++ .../node_modules/debug/LICENSE | 19 + .../node_modules/debug/Makefile | 50 + .../node_modules/debug/README.md | 312 + .../node_modules/debug/component.json | 19 + .../node_modules/debug/karma.conf.js | 70 + .../node_modules/debug/node.js | 1 + .../node_modules/debug/package.json | 91 + .../node_modules/debug/src/browser.js | 185 + .../node_modules/debug/src/debug.js | 202 + .../node_modules/debug/src/index.js | 10 + .../node_modules/debug/src/inspector-log.js | 15 + .../node_modules/debug/src/node.js | 248 + .../node_modules/inherits/LICENSE | 16 + 
.../node_modules/inherits/README.md | 42 + .../node_modules/inherits/inherits.js | 9 + .../node_modules/inherits/inherits_browser.js | 27 + .../node_modules/inherits/package.json | 61 + .../node_modules/lodash.defaults/LICENSE | 47 + .../node_modules/lodash.defaults/README.md | 18 + .../node_modules/lodash.defaults/index.js | 668 ++ .../node_modules/lodash.defaults/package.json | 72 + reverse_engineering/node_modules/ms/index.js | 152 + .../node_modules/ms/license.md | 21 + .../node_modules/ms/package.json | 72 + reverse_engineering/node_modules/ms/readme.md | 51 + .../node_modules/packet-reader/.travis.yml | 8 + .../node_modules/packet-reader/README.md | 87 + .../node_modules/packet-reader/index.js | 65 + .../node_modules/packet-reader/package.json | 52 + .../node_modules/packet-reader/test/index.js | 148 + .../node_modules/pg-connection-string/LICENSE | 21 + .../pg-connection-string/README.md | 77 + .../pg-connection-string/index.d.ts | 15 + .../pg-connection-string/index.js | 106 + .../pg-connection-string/package.json | 69 + .../node_modules/pg-int8/LICENSE | 13 + .../node_modules/pg-int8/README.md | 16 + .../node_modules/pg-int8/index.js | 100 + .../node_modules/pg-int8/package.json | 52 + .../node_modules/pg-pool/LICENSE | 21 + .../node_modules/pg-pool/README.md | 376 ++ .../node_modules/pg-pool/index.js | 421 ++ .../node_modules/pg-pool/package.json | 68 + .../pg-pool/test/bring-your-own-promise.js | 42 + .../pg-pool/test/connection-strings.js | 29 + .../pg-pool/test/connection-timeout.js | 229 + .../node_modules/pg-pool/test/ending.js | 40 + .../pg-pool/test/error-handling.js | 248 + .../node_modules/pg-pool/test/events.js | 86 + .../pg-pool/test/idle-timeout-exit.js | 16 + .../node_modules/pg-pool/test/idle-timeout.js | 118 + .../node_modules/pg-pool/test/index.js | 226 + .../node_modules/pg-pool/test/logging.js | 20 + .../node_modules/pg-pool/test/max-uses.js | 98 + .../pg-pool/test/releasing-clients.js | 54 + .../node_modules/pg-pool/test/setup.js | 10 + 
.../node_modules/pg-pool/test/sizing.js | 58 + .../node_modules/pg-pool/test/submittable.js | 19 + .../node_modules/pg-pool/test/timeout.js | 0 .../node_modules/pg-pool/test/verify.js | 24 + .../node_modules/pg-protocol/LICENSE | 21 + .../node_modules/pg-protocol/README.md | 3 + .../node_modules/pg-protocol/dist/b.d.ts | 1 + .../node_modules/pg-protocol/dist/b.js | 25 + .../node_modules/pg-protocol/dist/b.js.map | 1 + .../pg-protocol/dist/buffer-reader.d.ts | 14 + .../pg-protocol/dist/buffer-reader.js | 50 + .../pg-protocol/dist/buffer-reader.js.map | 1 + .../pg-protocol/dist/buffer-writer.d.ts | 16 + .../pg-protocol/dist/buffer-writer.js | 81 + .../pg-protocol/dist/buffer-writer.js.map | 1 + .../pg-protocol/dist/inbound-parser.test.d.ts | 1 + .../pg-protocol/dist/inbound-parser.test.js | 511 ++ .../dist/inbound-parser.test.js.map | 1 + .../node_modules/pg-protocol/dist/index.d.ts | 6 + .../node_modules/pg-protocol/dist/index.js | 15 + .../pg-protocol/dist/index.js.map | 1 + .../pg-protocol/dist/messages.d.ts | 162 + .../node_modules/pg-protocol/dist/messages.js | 160 + .../pg-protocol/dist/messages.js.map | 1 + .../dist/outbound-serializer.test.d.ts | 1 + .../dist/outbound-serializer.test.js | 248 + .../dist/outbound-serializer.test.js.map | 1 + .../node_modules/pg-protocol/dist/parser.d.ts | 38 + .../node_modules/pg-protocol/dist/parser.js | 308 + .../pg-protocol/dist/parser.js.map | 1 + .../pg-protocol/dist/serializer.d.ts | 43 + .../pg-protocol/dist/serializer.js | 189 + .../pg-protocol/dist/serializer.js.map | 1 + .../node_modules/pg-protocol/package.json | 64 + .../node_modules/pg-protocol/src/b.ts | 28 + .../pg-protocol/src/buffer-reader.ts | 53 + .../pg-protocol/src/buffer-writer.ts | 85 + .../pg-protocol/src/inbound-parser.test.ts | 557 ++ .../node_modules/pg-protocol/src/index.ts | 11 + .../node_modules/pg-protocol/src/messages.ts | 230 + .../src/outbound-serializer.test.ts | 272 + .../node_modules/pg-protocol/src/parser.ts | 389 ++ 
.../pg-protocol/src/serializer.ts | 274 + .../pg-protocol/src/testing/buffer-list.ts | 75 + .../pg-protocol/src/testing/test-buffers.ts | 166 + .../pg-protocol/src/types/chunky.d.ts | 1 + .../node_modules/pg-types/.travis.yml | 7 + .../node_modules/pg-types/Makefile | 14 + .../node_modules/pg-types/README.md | 75 + .../node_modules/pg-types/index.d.ts | 137 + .../node_modules/pg-types/index.js | 47 + .../node_modules/pg-types/index.test-d.ts | 21 + .../node_modules/pg-types/lib/arrayParser.js | 11 + .../pg-types/lib/binaryParsers.js | 257 + .../node_modules/pg-types/lib/builtins.js | 73 + .../node_modules/pg-types/lib/textParsers.js | 215 + .../node_modules/pg-types/package.json | 69 + .../node_modules/pg-types/test/index.js | 24 + .../node_modules/pg-types/test/types.js | 597 ++ reverse_engineering/node_modules/pg/LICENSE | 21 + reverse_engineering/node_modules/pg/README.md | 101 + .../node_modules/pg/lib/client.js | 621 ++ .../pg/lib/connection-parameters.js | 166 + .../node_modules/pg/lib/connection.js | 221 + .../node_modules/pg/lib/defaults.js | 80 + .../node_modules/pg/lib/index.js | 55 + .../node_modules/pg/lib/native/client.js | 297 + .../node_modules/pg/lib/native/index.js | 2 + .../node_modules/pg/lib/native/query.js | 165 + .../node_modules/pg/lib/query.js | 234 + .../node_modules/pg/lib/result.js | 100 + .../node_modules/pg/lib/sasl.js | 209 + .../node_modules/pg/lib/type-overrides.js | 35 + .../node_modules/pg/lib/utils.js | 187 + .../node_modules/pg/package.json | 89 + .../node_modules/pgpass/README.md | 74 + .../node_modules/pgpass/lib/helper.js | 233 + .../node_modules/pgpass/lib/index.js | 23 + .../node_modules/pgpass/package.json | 72 + .../node_modules/postgres-array/index.d.ts | 4 + .../node_modules/postgres-array/index.js | 97 + .../node_modules/postgres-array/license | 21 + .../node_modules/postgres-array/package.json | 67 + .../node_modules/postgres-array/readme.md | 43 + .../node_modules/postgres-bytea/index.js | 31 + 
.../node_modules/postgres-bytea/license | 21 + .../node_modules/postgres-bytea/package.json | 66 + .../node_modules/postgres-bytea/readme.md | 34 + .../node_modules/postgres-date/index.js | 116 + .../node_modules/postgres-date/license | 21 + .../node_modules/postgres-date/package.json | 65 + .../node_modules/postgres-date/readme.md | 49 + .../node_modules/postgres-interval/index.d.ts | 20 + .../node_modules/postgres-interval/index.js | 125 + .../node_modules/postgres-interval/license | 21 + .../postgres-interval/package.json | 68 + .../node_modules/postgres-interval/readme.md | 48 + .../readable-stream/CONTRIBUTING.md | 38 + .../readable-stream/GOVERNANCE.md | 136 + .../node_modules/readable-stream/LICENSE | 47 + .../node_modules/readable-stream/README.md | 106 + .../readable-stream/errors-browser.js | 127 + .../node_modules/readable-stream/errors.js | 116 + .../readable-stream/experimentalWarning.js | 17 + .../readable-stream/lib/_stream_duplex.js | 139 + .../lib/_stream_passthrough.js | 39 + .../readable-stream/lib/_stream_readable.js | 1124 ++++ .../readable-stream/lib/_stream_transform.js | 201 + .../readable-stream/lib/_stream_writable.js | 697 +++ .../lib/internal/streams/async_iterator.js | 207 + .../lib/internal/streams/buffer_list.js | 210 + .../lib/internal/streams/destroy.js | 105 + .../lib/internal/streams/end-of-stream.js | 104 + .../lib/internal/streams/from-browser.js | 3 + .../lib/internal/streams/from.js | 64 + .../lib/internal/streams/pipeline.js | 97 + .../lib/internal/streams/state.js | 27 + .../lib/internal/streams/stream-browser.js | 1 + .../lib/internal/streams/stream.js | 1 + .../node_modules/readable-stream/package.json | 97 + .../readable-stream/readable-browser.js | 9 + .../node_modules/readable-stream/readable.js | 16 + .../node_modules/safe-buffer/LICENSE | 21 + .../node_modules/safe-buffer/README.md | 584 ++ .../node_modules/safe-buffer/index.d.ts | 187 + .../node_modules/safe-buffer/index.js | 65 + 
.../node_modules/safe-buffer/package.json | 76 + .../node_modules/safer-buffer/LICENSE | 21 + .../safer-buffer/Porting-Buffer.md | 268 + .../node_modules/safer-buffer/Readme.md | 156 + .../node_modules/safer-buffer/dangerous.js | 58 + .../node_modules/safer-buffer/package.json | 63 + .../node_modules/safer-buffer/safer.js | 77 + .../node_modules/safer-buffer/tests.js | 406 ++ .../node_modules/semver/CHANGELOG.md | 39 + .../node_modules/semver/LICENSE | 15 + .../node_modules/semver/README.md | 412 ++ .../node_modules/semver/bin/semver | 160 + .../node_modules/semver/package.json | 63 + .../node_modules/semver/range.bnf | 16 + .../node_modules/semver/semver.js | 1483 +++++ .../node_modules/split2/LICENSE | 13 + .../node_modules/split2/README.md | 97 + .../node_modules/split2/bench.js | 27 + .../node_modules/split2/index.js | 132 + .../node_modules/split2/package.json | 69 + .../node_modules/split2/test.js | 392 ++ .../node_modules/ssh2-streams/.travis.yml | 17 + .../node_modules/ssh2-streams/LICENSE | 19 + .../node_modules/ssh2-streams/README.md | 569 ++ .../node_modules/ssh2-streams/SFTPStream.md | 405 ++ .../node_modules/ssh2-streams/index.js | 6 + .../ssh2-streams/lib/constants.js | 386 ++ .../node_modules/ssh2-streams/lib/jsbn.js | 1186 ++++ .../ssh2-streams/lib/keyParser.js | 239 + .../node_modules/ssh2-streams/lib/sftp.js | 2967 +++++++++ .../node_modules/ssh2-streams/lib/ssh.js | 5403 +++++++++++++++++ .../node_modules/ssh2-streams/lib/utils.js | 817 +++ .../node_modules/ssh2-streams/package.json | 73 + .../test/fixtures/encrypted-dsa.ppk | 17 + .../test/fixtures/encrypted-rsa.ppk | 18 + .../ssh2-streams/test/fixtures/id_rsa | 27 + .../ssh2-streams/test/fixtures/id_rsa_enc | 30 + .../test/fixtures/ssh_host_rsa_key | 15 + .../ssh2-streams/test/test-durability-ssh.js | 169 + .../ssh2-streams/test/test-kexdh.js | 66 + .../ssh2-streams/test/test-keyparser.js | 2301 +++++++ .../ssh2-streams/test/test-packet60.js | 256 + .../ssh2-streams/test/test-sftp.js | 1274 
++++ .../ssh2-streams/test/test-ssh.js | 143 + .../ssh2-streams/test/test-utils.js | 678 +++ .../node_modules/ssh2-streams/test/test.js | 22 + .../node_modules/ssh2/.travis.yml | 18 + reverse_engineering/node_modules/ssh2/LICENSE | 19 + .../node_modules/ssh2/README.md | 1053 ++++ .../node_modules/ssh2/examples/server-chat.js | 243 + .../examples/sftp-server-download-only.js | 96 + .../node_modules/ssh2/lib/Channel.js | 506 ++ .../node_modules/ssh2/lib/SFTPWrapper.js | 145 + .../node_modules/ssh2/lib/agent.js | 412 ++ .../node_modules/ssh2/lib/client.js | 1522 +++++ .../node_modules/ssh2/lib/keepalivemgr.js | 76 + .../node_modules/ssh2/lib/server.js | 1156 ++++ .../node_modules/ssh2/lib/utils.js | 5 + .../node_modules/ssh2/package.json | 74 + .../ssh2/test/fixtures/bad_rsa_private_key | 26 + .../node_modules/ssh2/test/fixtures/id_dsa | 12 + .../node_modules/ssh2/test/fixtures/id_ecdsa | 5 + .../node_modules/ssh2/test/fixtures/id_rsa | 15 + .../ssh2/test/fixtures/id_rsa.ppk | 26 + .../ssh2/test/fixtures/id_rsa_enc | 30 + .../ssh2/test/fixtures/ssh_host_dsa_key | 12 + .../ssh2/test/fixtures/ssh_host_ecdsa_key | 5 + .../ssh2/test/fixtures/ssh_host_rsa_key | 15 + .../ssh2/test/test-client-server.js | 1902 ++++++ .../node_modules/ssh2/test/test-openssh.js | 459 ++ .../node_modules/ssh2/test/test.js | 22 + .../node_modules/ssh2/util/build_pagent.bat | 2 + .../node_modules/ssh2/util/pagent.c | 88 + .../node_modules/ssh2/util/pagent.exe | Bin 0 -> 50688 bytes .../node_modules/streamsearch/LICENSE | 19 + .../node_modules/streamsearch/README.md | 87 + .../node_modules/streamsearch/lib/sbmh.js | 213 + .../node_modules/streamsearch/package.json | 62 + .../node_modules/string_decoder/LICENSE | 48 + .../node_modules/string_decoder/README.md | 47 + .../string_decoder/lib/string_decoder.js | 296 + .../node_modules/string_decoder/package.json | 62 + .../node_modules/tunnel-ssh/.eslintignore | 4 + .../node_modules/tunnel-ssh/README.md | 139 + .../tunnel-ssh/examples/default.js | 26 + 
.../tunnel-ssh/examples/keepAlive.js | 35 + .../tunnel-ssh/examples/keepAlive_error.js | 35 + .../tunnel-ssh/examples/loginError.js | 30 + .../tunnel-ssh/examples/server/index.js | 36 + .../node_modules/tunnel-ssh/index.js | 101 + .../node_modules/tunnel-ssh/lib/config.js | 53 + .../node_modules/tunnel-ssh/package.json | 80 + .../node_modules/tunnel-ssh/rewrite.js | 138 + .../tunnel-ssh/test/config-spec.js | 56 + .../node_modules/tunnel-ssh/test/main-spec.js | 37 + .../tunnel-ssh/test/server/index.js | 36 + .../node_modules/util-deprecate/History.md | 16 + .../node_modules/util-deprecate/LICENSE | 24 + .../node_modules/util-deprecate/README.md | 53 + .../node_modules/util-deprecate/browser.js | 67 + .../node_modules/util-deprecate/node.js | 6 + .../node_modules/util-deprecate/package.json | 56 + .../node_modules/xtend/.jshintrc | 30 + .../node_modules/xtend/LICENSE | 20 + .../node_modules/xtend/README.md | 32 + .../node_modules/xtend/immutable.js | 19 + .../node_modules/xtend/mutable.js | 17 + .../node_modules/xtend/package.json | 86 + .../node_modules/xtend/test.js | 103 + reverse_engineering/package-lock.json | 181 +- reverse_engineering/package.json | 2 +- 321 files changed, 52748 insertions(+), 1077 deletions(-) delete mode 100644 reverse_engineering/helpers/connectionHelper.js delete mode 100644 reverse_engineering/helpers/mariadbHelper.js delete mode 100644 reverse_engineering/helpers/parsers/functionHelper.js delete mode 100644 reverse_engineering/helpers/parsers/procedureHelper.js create mode 120000 reverse_engineering/node_modules/.bin/semver create mode 100644 reverse_engineering/node_modules/asn1/LICENSE create mode 100644 reverse_engineering/node_modules/asn1/README.md create mode 100644 reverse_engineering/node_modules/asn1/lib/ber/errors.js create mode 100644 reverse_engineering/node_modules/asn1/lib/ber/index.js create mode 100644 reverse_engineering/node_modules/asn1/lib/ber/reader.js create mode 100644 
reverse_engineering/node_modules/asn1/lib/ber/types.js create mode 100644 reverse_engineering/node_modules/asn1/lib/ber/writer.js create mode 100644 reverse_engineering/node_modules/asn1/lib/index.js create mode 100644 reverse_engineering/node_modules/asn1/package.json create mode 100644 reverse_engineering/node_modules/buffer-writer/.travis.yml create mode 100644 reverse_engineering/node_modules/buffer-writer/LICENSE create mode 100644 reverse_engineering/node_modules/buffer-writer/README.md create mode 100644 reverse_engineering/node_modules/buffer-writer/index.js create mode 100644 reverse_engineering/node_modules/buffer-writer/package.json create mode 100644 reverse_engineering/node_modules/buffer-writer/test/mocha.opts create mode 100644 reverse_engineering/node_modules/buffer-writer/test/writer-tests.js create mode 100644 reverse_engineering/node_modules/debug/.coveralls.yml create mode 100644 reverse_engineering/node_modules/debug/.eslintrc create mode 100644 reverse_engineering/node_modules/debug/.npmignore create mode 100644 reverse_engineering/node_modules/debug/.travis.yml create mode 100644 reverse_engineering/node_modules/debug/CHANGELOG.md create mode 100644 reverse_engineering/node_modules/debug/LICENSE create mode 100644 reverse_engineering/node_modules/debug/Makefile create mode 100644 reverse_engineering/node_modules/debug/README.md create mode 100644 reverse_engineering/node_modules/debug/component.json create mode 100644 reverse_engineering/node_modules/debug/karma.conf.js create mode 100644 reverse_engineering/node_modules/debug/node.js create mode 100644 reverse_engineering/node_modules/debug/package.json create mode 100644 reverse_engineering/node_modules/debug/src/browser.js create mode 100644 reverse_engineering/node_modules/debug/src/debug.js create mode 100644 reverse_engineering/node_modules/debug/src/index.js create mode 100644 reverse_engineering/node_modules/debug/src/inspector-log.js create mode 100644 
reverse_engineering/node_modules/debug/src/node.js create mode 100644 reverse_engineering/node_modules/inherits/LICENSE create mode 100644 reverse_engineering/node_modules/inherits/README.md create mode 100644 reverse_engineering/node_modules/inherits/inherits.js create mode 100644 reverse_engineering/node_modules/inherits/inherits_browser.js create mode 100644 reverse_engineering/node_modules/inherits/package.json create mode 100644 reverse_engineering/node_modules/lodash.defaults/LICENSE create mode 100644 reverse_engineering/node_modules/lodash.defaults/README.md create mode 100644 reverse_engineering/node_modules/lodash.defaults/index.js create mode 100644 reverse_engineering/node_modules/lodash.defaults/package.json create mode 100644 reverse_engineering/node_modules/ms/index.js create mode 100644 reverse_engineering/node_modules/ms/license.md create mode 100644 reverse_engineering/node_modules/ms/package.json create mode 100644 reverse_engineering/node_modules/ms/readme.md create mode 100644 reverse_engineering/node_modules/packet-reader/.travis.yml create mode 100644 reverse_engineering/node_modules/packet-reader/README.md create mode 100644 reverse_engineering/node_modules/packet-reader/index.js create mode 100644 reverse_engineering/node_modules/packet-reader/package.json create mode 100644 reverse_engineering/node_modules/packet-reader/test/index.js create mode 100644 reverse_engineering/node_modules/pg-connection-string/LICENSE create mode 100644 reverse_engineering/node_modules/pg-connection-string/README.md create mode 100644 reverse_engineering/node_modules/pg-connection-string/index.d.ts create mode 100644 reverse_engineering/node_modules/pg-connection-string/index.js create mode 100644 reverse_engineering/node_modules/pg-connection-string/package.json create mode 100644 reverse_engineering/node_modules/pg-int8/LICENSE create mode 100644 reverse_engineering/node_modules/pg-int8/README.md create mode 100644 
reverse_engineering/node_modules/pg-int8/index.js create mode 100644 reverse_engineering/node_modules/pg-int8/package.json create mode 100644 reverse_engineering/node_modules/pg-pool/LICENSE create mode 100644 reverse_engineering/node_modules/pg-pool/README.md create mode 100644 reverse_engineering/node_modules/pg-pool/index.js create mode 100644 reverse_engineering/node_modules/pg-pool/package.json create mode 100644 reverse_engineering/node_modules/pg-pool/test/bring-your-own-promise.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/connection-strings.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/connection-timeout.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/ending.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/error-handling.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/events.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/idle-timeout-exit.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/idle-timeout.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/index.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/logging.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/max-uses.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/releasing-clients.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/setup.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/sizing.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/submittable.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/timeout.js create mode 100644 reverse_engineering/node_modules/pg-pool/test/verify.js create mode 100644 reverse_engineering/node_modules/pg-protocol/LICENSE create mode 100644 reverse_engineering/node_modules/pg-protocol/README.md create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/b.d.ts create mode 
100644 reverse_engineering/node_modules/pg-protocol/dist/b.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/b.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/index.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/index.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/index.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/messages.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/messages.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/messages.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/parser.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/parser.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/parser.js.map create mode 100644 
reverse_engineering/node_modules/pg-protocol/dist/serializer.d.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/serializer.js create mode 100644 reverse_engineering/node_modules/pg-protocol/dist/serializer.js.map create mode 100644 reverse_engineering/node_modules/pg-protocol/package.json create mode 100644 reverse_engineering/node_modules/pg-protocol/src/b.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/buffer-reader.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/buffer-writer.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/inbound-parser.test.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/index.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/messages.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/outbound-serializer.test.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/parser.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/serializer.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/testing/buffer-list.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/testing/test-buffers.ts create mode 100644 reverse_engineering/node_modules/pg-protocol/src/types/chunky.d.ts create mode 100644 reverse_engineering/node_modules/pg-types/.travis.yml create mode 100644 reverse_engineering/node_modules/pg-types/Makefile create mode 100644 reverse_engineering/node_modules/pg-types/README.md create mode 100644 reverse_engineering/node_modules/pg-types/index.d.ts create mode 100644 reverse_engineering/node_modules/pg-types/index.js create mode 100644 reverse_engineering/node_modules/pg-types/index.test-d.ts create mode 100644 reverse_engineering/node_modules/pg-types/lib/arrayParser.js create mode 100644 reverse_engineering/node_modules/pg-types/lib/binaryParsers.js create mode 100644 
reverse_engineering/node_modules/pg-types/lib/builtins.js create mode 100644 reverse_engineering/node_modules/pg-types/lib/textParsers.js create mode 100644 reverse_engineering/node_modules/pg-types/package.json create mode 100644 reverse_engineering/node_modules/pg-types/test/index.js create mode 100644 reverse_engineering/node_modules/pg-types/test/types.js create mode 100644 reverse_engineering/node_modules/pg/LICENSE create mode 100644 reverse_engineering/node_modules/pg/README.md create mode 100644 reverse_engineering/node_modules/pg/lib/client.js create mode 100644 reverse_engineering/node_modules/pg/lib/connection-parameters.js create mode 100644 reverse_engineering/node_modules/pg/lib/connection.js create mode 100644 reverse_engineering/node_modules/pg/lib/defaults.js create mode 100644 reverse_engineering/node_modules/pg/lib/index.js create mode 100644 reverse_engineering/node_modules/pg/lib/native/client.js create mode 100644 reverse_engineering/node_modules/pg/lib/native/index.js create mode 100644 reverse_engineering/node_modules/pg/lib/native/query.js create mode 100644 reverse_engineering/node_modules/pg/lib/query.js create mode 100644 reverse_engineering/node_modules/pg/lib/result.js create mode 100644 reverse_engineering/node_modules/pg/lib/sasl.js create mode 100644 reverse_engineering/node_modules/pg/lib/type-overrides.js create mode 100644 reverse_engineering/node_modules/pg/lib/utils.js create mode 100644 reverse_engineering/node_modules/pg/package.json create mode 100644 reverse_engineering/node_modules/pgpass/README.md create mode 100644 reverse_engineering/node_modules/pgpass/lib/helper.js create mode 100644 reverse_engineering/node_modules/pgpass/lib/index.js create mode 100644 reverse_engineering/node_modules/pgpass/package.json create mode 100644 reverse_engineering/node_modules/postgres-array/index.d.ts create mode 100644 reverse_engineering/node_modules/postgres-array/index.js create mode 100644 
reverse_engineering/node_modules/postgres-array/license create mode 100644 reverse_engineering/node_modules/postgres-array/package.json create mode 100644 reverse_engineering/node_modules/postgres-array/readme.md create mode 100644 reverse_engineering/node_modules/postgres-bytea/index.js create mode 100644 reverse_engineering/node_modules/postgres-bytea/license create mode 100644 reverse_engineering/node_modules/postgres-bytea/package.json create mode 100644 reverse_engineering/node_modules/postgres-bytea/readme.md create mode 100644 reverse_engineering/node_modules/postgres-date/index.js create mode 100644 reverse_engineering/node_modules/postgres-date/license create mode 100644 reverse_engineering/node_modules/postgres-date/package.json create mode 100644 reverse_engineering/node_modules/postgres-date/readme.md create mode 100644 reverse_engineering/node_modules/postgres-interval/index.d.ts create mode 100644 reverse_engineering/node_modules/postgres-interval/index.js create mode 100644 reverse_engineering/node_modules/postgres-interval/license create mode 100644 reverse_engineering/node_modules/postgres-interval/package.json create mode 100644 reverse_engineering/node_modules/postgres-interval/readme.md create mode 100644 reverse_engineering/node_modules/readable-stream/CONTRIBUTING.md create mode 100644 reverse_engineering/node_modules/readable-stream/GOVERNANCE.md create mode 100644 reverse_engineering/node_modules/readable-stream/LICENSE create mode 100644 reverse_engineering/node_modules/readable-stream/README.md create mode 100644 reverse_engineering/node_modules/readable-stream/errors-browser.js create mode 100644 reverse_engineering/node_modules/readable-stream/errors.js create mode 100644 reverse_engineering/node_modules/readable-stream/experimentalWarning.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/_stream_duplex.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/_stream_passthrough.js create mode 
100644 reverse_engineering/node_modules/readable-stream/lib/_stream_readable.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/_stream_transform.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/_stream_writable.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/async_iterator.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/buffer_list.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/destroy.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/end-of-stream.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/from-browser.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/from.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/pipeline.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/state.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream-browser.js create mode 100644 reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream.js create mode 100644 reverse_engineering/node_modules/readable-stream/package.json create mode 100644 reverse_engineering/node_modules/readable-stream/readable-browser.js create mode 100644 reverse_engineering/node_modules/readable-stream/readable.js create mode 100644 reverse_engineering/node_modules/safe-buffer/LICENSE create mode 100644 reverse_engineering/node_modules/safe-buffer/README.md create mode 100644 reverse_engineering/node_modules/safe-buffer/index.d.ts create mode 100644 reverse_engineering/node_modules/safe-buffer/index.js create mode 100644 reverse_engineering/node_modules/safe-buffer/package.json create mode 100644 reverse_engineering/node_modules/safer-buffer/LICENSE create mode 
100644 reverse_engineering/node_modules/safer-buffer/Porting-Buffer.md create mode 100644 reverse_engineering/node_modules/safer-buffer/Readme.md create mode 100644 reverse_engineering/node_modules/safer-buffer/dangerous.js create mode 100644 reverse_engineering/node_modules/safer-buffer/package.json create mode 100644 reverse_engineering/node_modules/safer-buffer/safer.js create mode 100644 reverse_engineering/node_modules/safer-buffer/tests.js create mode 100644 reverse_engineering/node_modules/semver/CHANGELOG.md create mode 100644 reverse_engineering/node_modules/semver/LICENSE create mode 100644 reverse_engineering/node_modules/semver/README.md create mode 100755 reverse_engineering/node_modules/semver/bin/semver create mode 100644 reverse_engineering/node_modules/semver/package.json create mode 100644 reverse_engineering/node_modules/semver/range.bnf create mode 100644 reverse_engineering/node_modules/semver/semver.js create mode 100644 reverse_engineering/node_modules/split2/LICENSE create mode 100644 reverse_engineering/node_modules/split2/README.md create mode 100644 reverse_engineering/node_modules/split2/bench.js create mode 100644 reverse_engineering/node_modules/split2/index.js create mode 100644 reverse_engineering/node_modules/split2/package.json create mode 100644 reverse_engineering/node_modules/split2/test.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/.travis.yml create mode 100644 reverse_engineering/node_modules/ssh2-streams/LICENSE create mode 100644 reverse_engineering/node_modules/ssh2-streams/README.md create mode 100644 reverse_engineering/node_modules/ssh2-streams/SFTPStream.md create mode 100644 reverse_engineering/node_modules/ssh2-streams/index.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/lib/constants.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/lib/jsbn.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/lib/keyParser.js create mode 100644 
reverse_engineering/node_modules/ssh2-streams/lib/sftp.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/lib/ssh.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/lib/utils.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/package.json create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-dsa.ppk create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-rsa.ppk create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa_enc create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/fixtures/ssh_host_rsa_key create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test-durability-ssh.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test-kexdh.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test-keyparser.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test-packet60.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test-sftp.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test-ssh.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test-utils.js create mode 100644 reverse_engineering/node_modules/ssh2-streams/test/test.js create mode 100644 reverse_engineering/node_modules/ssh2/.travis.yml create mode 100644 reverse_engineering/node_modules/ssh2/LICENSE create mode 100644 reverse_engineering/node_modules/ssh2/README.md create mode 100644 reverse_engineering/node_modules/ssh2/examples/server-chat.js create mode 100644 reverse_engineering/node_modules/ssh2/examples/sftp-server-download-only.js create mode 100644 reverse_engineering/node_modules/ssh2/lib/Channel.js create mode 100644 reverse_engineering/node_modules/ssh2/lib/SFTPWrapper.js create mode 100644 
reverse_engineering/node_modules/ssh2/lib/agent.js create mode 100644 reverse_engineering/node_modules/ssh2/lib/client.js create mode 100644 reverse_engineering/node_modules/ssh2/lib/keepalivemgr.js create mode 100644 reverse_engineering/node_modules/ssh2/lib/server.js create mode 100644 reverse_engineering/node_modules/ssh2/lib/utils.js create mode 100644 reverse_engineering/node_modules/ssh2/package.json create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/bad_rsa_private_key create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/id_dsa create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/id_ecdsa create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa.ppk create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa_enc create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_dsa_key create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_ecdsa_key create mode 100644 reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_rsa_key create mode 100644 reverse_engineering/node_modules/ssh2/test/test-client-server.js create mode 100644 reverse_engineering/node_modules/ssh2/test/test-openssh.js create mode 100644 reverse_engineering/node_modules/ssh2/test/test.js create mode 100644 reverse_engineering/node_modules/ssh2/util/build_pagent.bat create mode 100644 reverse_engineering/node_modules/ssh2/util/pagent.c create mode 100644 reverse_engineering/node_modules/ssh2/util/pagent.exe create mode 100644 reverse_engineering/node_modules/streamsearch/LICENSE create mode 100644 reverse_engineering/node_modules/streamsearch/README.md create mode 100644 reverse_engineering/node_modules/streamsearch/lib/sbmh.js create mode 100644 reverse_engineering/node_modules/streamsearch/package.json create mode 100644 reverse_engineering/node_modules/string_decoder/LICENSE 
create mode 100644 reverse_engineering/node_modules/string_decoder/README.md create mode 100644 reverse_engineering/node_modules/string_decoder/lib/string_decoder.js create mode 100644 reverse_engineering/node_modules/string_decoder/package.json create mode 100644 reverse_engineering/node_modules/tunnel-ssh/.eslintignore create mode 100644 reverse_engineering/node_modules/tunnel-ssh/README.md create mode 100644 reverse_engineering/node_modules/tunnel-ssh/examples/default.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive_error.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/examples/loginError.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/examples/server/index.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/index.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/lib/config.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/package.json create mode 100644 reverse_engineering/node_modules/tunnel-ssh/rewrite.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/test/config-spec.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/test/main-spec.js create mode 100644 reverse_engineering/node_modules/tunnel-ssh/test/server/index.js create mode 100644 reverse_engineering/node_modules/util-deprecate/History.md create mode 100644 reverse_engineering/node_modules/util-deprecate/LICENSE create mode 100644 reverse_engineering/node_modules/util-deprecate/README.md create mode 100644 reverse_engineering/node_modules/util-deprecate/browser.js create mode 100644 reverse_engineering/node_modules/util-deprecate/node.js create mode 100644 reverse_engineering/node_modules/util-deprecate/package.json create mode 100644 reverse_engineering/node_modules/xtend/.jshintrc create mode 100644 reverse_engineering/node_modules/xtend/LICENSE create mode 100644 
reverse_engineering/node_modules/xtend/README.md create mode 100644 reverse_engineering/node_modules/xtend/immutable.js create mode 100644 reverse_engineering/node_modules/xtend/mutable.js create mode 100644 reverse_engineering/node_modules/xtend/package.json create mode 100644 reverse_engineering/node_modules/xtend/test.js diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index fa9c277..05e2d3e 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -213,7 +213,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Collation rule", - "propertyKeyword": "constraintName", + "propertyKeyword": "collationRule", "propertyTooltip": "If different than default. For example us_EN or be_FR", "propertyType": "text", "dependency": { @@ -510,7 +510,7 @@ making sure that you maintain a proper JSON format. "numeric", "real", "double precision", - "small serial", + "smallserial", "serial", "bigserial", "money" @@ -678,7 +678,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -997,7 +997,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -1375,7 +1375,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -1674,7 +1674,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -1729,7 +1729,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -2090,7 +2090,7 @@ making sure that you maintain a proper JSON format. 
"propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -2151,7 +2151,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -2445,7 +2445,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -2760,7 +2760,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -3054,7 +3054,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -3116,7 +3116,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 7bfad72..e08ec6c 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -1,239 +1,20 @@ 'use strict'; -const connectionHelper = require('./helpers/connectionHelper'); -const mariadbHelper = require('./helpers/mariadbHelper'); - -BigInt.prototype.toJSON = function () { - return Number(this.valueOf()); -} - -const ACCESS_DENIED_ERROR = 1045; - module.exports = { - async connect(connectionInfo) { - const connection = await connectionHelper.connect(connectionInfo); - - return connection; - }, - disconnect(connectionInfo, logger, callback, app) { - connectionHelper.close(); - - callback(); + }, async testConnection(connectionInfo, logger, callback, app) { - const log = createLogger({ - title: 'Test connection', - hiddenKeys: connectionInfo.hiddenKeys, - logger, - }); - try { - logger.clear(); - logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); - - const connection = await this.connect(connectionInfo); - await 
connection.ping(); - - log.info('Connected successfully'); - - callback(null); - } catch(error) { - log.error(error); - if (error.errno === ACCESS_DENIED_ERROR) { - callback({ message: `Access denied for user "${connectionInfo.userName}". Please, check whether the password is correct and the user has enough permissions to connect to the database server.`, stack: error.stack }); - } else { - callback({ message: error.message, stack: error.stack }); - } - } }, async getDbCollectionsNames(connectionInfo, logger, callback, app) { - const log = createLogger({ - title: 'Retrieving databases and tables information', - hiddenKeys: connectionInfo.hiddenKeys, - logger, - }); - - try { - logger.clear(); - logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); - const systemDatabases = connectionInfo.includeSystemCollection ? [] : ['information_schema', 'mysql', 'performance_schema']; - - const connection = await this.connect(connectionInfo); - const databases = connectionInfo.databaseName ? 
[connectionInfo.databaseName] : await connectionHelper.getDatabases(connection, systemDatabases); - - const collections = await databases.reduce(async (next, dbName) => { - const result = await next; - try { - const entities = await connectionHelper.getTables(connection, dbName); - const dbCollections = getDbCollectionNames(entities, dbName, connectionInfo.includeSystemCollection); - - return result.concat({ - dbName, - dbCollections, - isEmpty: dbCollections.length === 0, - }); - } catch (error) { - log.info(`Error reading database "${dbName}"`); - log.error(error); - - return result.concat({ - dbName, - dbCollections: [], - isEmpty: true, - status: true, - }); - } - }, Promise.resolve([])); - log.info('Names retrieved successfully'); - - callback(null, collections); - } catch(error) { - log.error(error); - callback({ message: error.message, stack: error.stack }); - } }, async getDbCollectionsData(data, logger, callback, app) { - const _ = app.require('lodash'); - const async = app.require('async'); - const log = createLogger({ - title: 'Reverse-engineering process', - hiddenKeys: data.hiddenKeys, - logger, - }); - - try { - logger.log('info', data, 'data', data.hiddenKeys); - - const collections = data.collectionData.collections; - const dataBaseNames = data.collectionData.dataBaseNames; - const connection = await this.connect(data); - const instance = await connectionHelper.createInstance(connection, logger); - - log.info('MariaDB version: ' + connection.serverVersion()); - log.progress('Start reverse engineering ...'); - - const result = await async.mapSeries(dataBaseNames, async (dbName) => { - const tables = (collections[dbName] || []).filter(name => !isViewName(name)); - const views = (collections[dbName] || []).filter(isViewName).map(getViewName); - - log.info(`Parsing database "${dbName}"`); - log.progress(`Parsing database "${dbName}"`, dbName); - - const containerData = mariadbHelper.parseDatabaseStatement( - await instance.describeDatabase(dbName) - ); 
- - log.info(`Parsing functions`); - log.progress(`Parsing functions`, dbName); - - const UDFs = mariadbHelper.parseFunctions( - await instance.getFunctions(dbName) - ); - - log.info(`Parsing procedures`); - log.progress(`Parsing procedures`, dbName); - - const Procedures = mariadbHelper.parseProcedures( - await instance.getProcedures(dbName) - ); - - const result = await async.mapSeries(tables, async (tableName) => { - log.info(`Get columns "${tableName}"`); - log.progress(`Get columns`, dbName, tableName); - - const columns = await instance.getColumns(dbName, tableName); - let records = []; - - if (containsJson(columns)) { - log.info(`Sampling table "${tableName}"`); - log.progress(`Sampling table`, dbName, tableName); - - const count = await instance.getCount(dbName, tableName); - records = await instance.getRecords(dbName, tableName, getLimit(count, data.recordSamplingSettings)); - } - - log.info(`Get create table statement "${tableName}"`); - log.progress(`Get create table statement`, dbName, tableName); - - const ddl = await instance.showCreateTable(dbName, tableName); - - log.info(`Get indexes "${tableName}"`); - log.progress(`Get indexes`, dbName, tableName); - - const indexes = await instance.getIndexes(dbName, tableName); - - log.info(`Get constraints "${tableName}"`); - log.progress(`Get constraints`, dbName, tableName); - - const constraints = await instance.getConstraints(dbName, tableName); - const jsonSchema = mariadbHelper.getJsonSchema({ columns, constraints, records, indexes }); - const Indxs = mariadbHelper.parseIndexes(indexes); - - log.info(`Data retrieved successfully "${tableName}"`); - log.progress(`Data retrieved successfully`, dbName, tableName); - - return { - dbName: dbName, - collectionName: tableName, - entityLevel: { - Indxs, - }, - documents: records, - views: [], - standardDoc: records[0], - ddl: { - script: ddl, - type: 'mariadb' - }, - emptyBucket: false, - validation: { - jsonSchema - }, - bucketInfo: { - ...containerData, - 
UDFs, - Procedures, - }, - }; - }); - - const viewData = await async.mapSeries(views, async (viewName) => { - log.info(`Getting data from view "${viewName}"`); - log.progress(`Getting data from view`, dbName, viewName); - - const ddl = await instance.showCreateView(dbName, viewName); - - return { - name: viewName, - ddl: { - script: ddl, - type: 'mariadb' - } - }; - }); - - if (viewData.length) { - return [...result, { - dbName: dbName, - views: viewData, - emptyBucket: false, - }]; - } - - return result; - }); - - - callback(null, result.flat()); - } catch(error) { - log.error(error); - callback({ message: error.message, stack: error.stack }); - } + }, }; @@ -255,49 +36,3 @@ const createLogger = ({ title, logger, hiddenKeys }) => { } }; }; - -const getDbCollectionNames = (entities, dbName, includeSystemCollection) => { - const isView = (type) => { - return ['VIEW'].includes(type); - }; - - return entities.filter(table => { - if (table['Table_type'] === 'SYSTEM VIEW') { - return false; - } - - if (includeSystemCollection) { - return true; - } - - const isSystem = !['BASE TABLE', 'VIEW', 'SEQUENCE'].includes(table['Table_type']); - - return !isSystem; - }).map(table => { - const name = table[`Tables_in_${dbName}`]; - - if (isView(table['Table_type'])) { - return `${name} (v)`; - } else { - return name; - } - }); -}; - -const getLimit = (count, recordSamplingSettings) => { - const per = recordSamplingSettings.relative.value; - const size = (recordSamplingSettings.active === 'absolute') - ? 
recordSamplingSettings.absolute.value - : Math.round(count / 100 * per); - return size; -}; - -const isViewName = (name) => { - return /\ \(v\)$/i.test(name); -}; - -const getViewName = (name) => name.replace(/\ \(v\)$/i, ''); - -const containsJson = (columns) => { - return columns.some(column => column['Type'] === 'longtext' || column['Type'] === 'json'); -}; diff --git a/reverse_engineering/helpers/connectionHelper.js b/reverse_engineering/helpers/connectionHelper.js deleted file mode 100644 index 6e8b2f3..0000000 --- a/reverse_engineering/helpers/connectionHelper.js +++ /dev/null @@ -1,246 +0,0 @@ -const mariadb = require('mariadb'); -const fs = require('fs'); -const ssh = require('tunnel-ssh'); - -let connection; -let sshTunnel; - -const getSshConfig = (info) => { - const config = { - username: info.ssh_user, - host: info.ssh_host, - port: info.ssh_port, - dstHost: info.host, - dstPort: info.port, - localHost: '127.0.0.1', - localPort: info.port, - keepAlive: true - }; - - if (info.ssh_method === 'privateKey') { - return Object.assign({}, config, { - privateKey: fs.readFileSync(info.ssh_key_file), - passphrase: info.ssh_key_passphrase - }); - } else { - return Object.assign({}, config, { - password: info.ssh_password - }); - } -}; - -const connectViaSsh = (info) => new Promise((resolve, reject) => { - ssh(getSshConfig(info), (err, tunnel) => { - if (err) { - reject(err); - } else { - resolve({ - tunnel, - info: Object.assign({}, info, { - host: '127.0.0.1', - }) - }); - } - }); -}); - -const getSslOptions = (connectionInfo) => { - if (connectionInfo.sslType === 'Off') { - return false; - } - - if (connectionInfo.sslType === 'Unvalidated') { - return { - rejectUnauthorized: false - }; - } - - if (connectionInfo.sslType === 'TRUST_CUSTOM_CA_SIGNED_CERTIFICATES') { - return { - ca: fs.readFileSync(connectionInfo.certAuthority), - }; - } - - if (connectionInfo.sslType === 'TRUST_SERVER_CLIENT_CERTIFICATES') { - return { - ca: 
fs.readFileSync(connectionInfo.certAuthority), - cert: fs.readFileSync(connectionInfo.clientCert), - key: fs.readFileSync(connectionInfo.clientPrivateKey), - }; - } -}; - -const createConnection = async (connectionInfo) => { - if (connectionInfo.ssh) { - const { info, tunnel } = await connectViaSsh(connectionInfo); - sshTunnel = tunnel; - connectionInfo = info; - } - - return await mariadb.createConnection({ - host: connectionInfo.host, - user: connectionInfo.userName, - password: connectionInfo.userPassword, - port: connectionInfo.port, - metaAsArray: false, - ssl: getSslOptions(connectionInfo), - dateStrings: true , - supportBigInt: true, - autoJsonMap: false, - connectTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, - database: connectionInfo.databaseName, - }); -}; - -const connect = async (connectionInfo) => { - if (connection) { - return connection; - } - - connection = await createConnection(connectionInfo); - - return connection; -}; - -const getDatabases = async (connection, systemDatabases) => { - const databases = await connection.query('show databases;'); - - return databases.map(item => item.Database).filter(dbName => !systemDatabases.includes(dbName)); -}; - -const getTables = async (connection, dbName) => { - const tables = await connection.query(`show full tables from \`${dbName}\`;`); - - return tables; -}; - -const createInstance = (connection, logger) => { - const getCount = async (dbName, tableName) => { - const count = await connection.query(`SELECT COUNT(*) as count FROM \`${dbName}\`.\`${tableName}\`;`); - - return Number(count[0]?.count || 0); - }; - - const getRecords = async (dbName, tableName, limit) => { - const result = await connection.query({ - sql: `SELECT * FROM \`${dbName}\`.\`${tableName}\` LIMIT ${limit};` - }); - - return result; - }; - - const getVersion = async () => { - const version = await connection.query('select version() as version;'); - - return version[0].version; - }; - - const describeDatabase = async 
(dbName) => { - const data = await connection.query(`show create database \`${dbName}\`;`); - - return data[0]['Create Database']; - }; - - const getFunctions = async (dbName) => { - const functions = await connection.query(`show function status WHERE Db = '${dbName}'`); - - return Promise.all( - functions.map( - f => connection.query(`show create function \`${dbName}\`.\`${f.Name}\`;`).then(functionCode => ({ - meta: f, - data: functionCode, - })) - ) - ); - }; - - const getProcedures = async (dbName) => { - const functions = await connection.query(`show procedure status WHERE Db = '${dbName}'`); - - return Promise.all( - functions.map( - f => connection.query(`show create procedure \`${dbName}\`.\`${f.Name}\`;`).then(functionCode => ({ - meta: f, - data: functionCode, - })) - ) - ); - }; - - const showCreateTable = async (dbName, tableName) => { - const result = await connection.query(`show create table \`${dbName}\`.\`${tableName}\`;`); - - return result[0]?.['Create Table']; - }; - - const getConstraints = async (dbName, tableName) => { - try { - const result = await connection.query(`select * from information_schema.check_constraints where CONSTRAINT_SCHEMA='${dbName}' AND TABLE_NAME='${tableName}';`); - - return result; - } catch (error) { - logger.log('error', { - message: '[Warning] ' + error.message, - stack: error.stack, - }); - return []; - } - }; - - const getColumns = async (dbName, tableName) => { - const result = await connection.query(`show fields from \`${dbName}\`.\`${tableName}\`;`); - - return result; - }; - - const getIndexes = async (dbName, tableName) => { - const result = await connection.query(`show index from \`${tableName}\` from \`${dbName}\`;`); - - return result; - }; - - const showCreateView = async (dbName, viewName) => { - const result = await connection.query(`show create view \`${dbName}\`.\`${viewName}\`;`); - - return result[0]?.['Create View']; - }; - - const query = (sql) => { - return connection.query(sql); - }; - - return { 
- getCount, - getRecords, - getVersion, - describeDatabase, - getFunctions, - getProcedures, - showCreateTable, - getConstraints, - getColumns, - getIndexes, - showCreateView, - query, - }; -}; - -const close = () => { - if (connection) { - connection.end(); - connection = null; - } - - if (sshTunnel) { - sshTunnel.close(); - sshTunnel = null; - } -}; - -module.exports = { - connect, - getDatabases, - getTables, - createInstance, - close, -}; diff --git a/reverse_engineering/helpers/mariadbHelper.js b/reverse_engineering/helpers/mariadbHelper.js deleted file mode 100644 index 503db4a..0000000 --- a/reverse_engineering/helpers/mariadbHelper.js +++ /dev/null @@ -1,310 +0,0 @@ -const functionHelper = require("./parsers/functionHelper"); -const procedureHelper = require("./parsers/procedureHelper"); - -const parseDatabaseStatement = (statement) => { - const characterSetRegExp = /CHARACTER\ SET\ (.+?)\ /i; - const collationRegExp = /COLLATE\ (.+?)\ /i; - const commentRegExp = /COMMENT\ \'([\s\S]*?)\'/i; - const data = {}; - - if (characterSetRegExp.test(statement)) { - data.characterSet = statement.match(characterSetRegExp)[1]; - } - - if (collationRegExp.test(statement)) { - data.collation = statement.match(collationRegExp)[1]; - } - - if (commentRegExp.test(statement)) { - data.description = statement.match(commentRegExp)[1]; - } - - return data; -}; - -const parseFunctions = (functions) => { - return functions.map(f => { - const query = f.data[0]['Create Function']; - - try { - const func = functionHelper.parseFunctionQuery(String(query)); - - return { - name: f.meta['Name'], - functionDelimiter: (func.body || '').includes(';') ? 
'$$' : '', - functionOrReplace: func.orReplace, - functionAggregate: func.isAggregate, - functionIfNotExist: func.ifNotExists, - functionArguments: func.parameters, - functionDataType: func.returnType, - functionBody: func.body, - functionLanguage: 'SQL', - functionDeterministic: functionHelper.getDeterministic(func.characteristics), - functionContains: functionHelper.getContains(func.characteristics), - functionSqlSecurity: f.meta['Security_type'], - functionDescription: f.meta['Comment'], - }; - } catch (error) { - throw { - message: error.message + '.\nError parsing function: ' + query, - stack: error.stack, - }; - } - }); -}; - -const parseProcedures = (procedures) => { - return procedures.map(procedure => { - try { - const meta = procedure.meta; - const procValue = procedure.data[0]['Create Procedure']; - const data = procedureHelper.parseProcedure(String(procValue)); - - return { - name: meta['Name'], - delimiter: (data.body || '').includes(';') ? '$$' : '', - orReplace: data.orReplace, - inputArgs: data.parameters, - body: data.body, - language: 'SQL', - deterministic: data.deterministic, - contains: data.contains, - securityMode: meta['Security_type'], - comments: meta['Comment'] - }; - } catch (error) { - throw { - message: error.message + '.\nError parsing procedure: ' + procedure.data[0]['Create Procedure'], - stack: error.stack, - }; - } - }); -}; - -const isJson = (columnName, constraints) => { - return constraints.some(constraint => { - const check = constraint['CHECK_CLAUSE']; - - if (!/json_valid/i.test(check)) { - return false; - } - - return check.includes(`\`${columnName}\``); - }); -}; - -const findJsonRecord = (fieldName, records) => { - return records.find(records => { - if (typeof records[fieldName] !== 'string') { - return false; - } - - try { - return JSON.parse(records[fieldName]); - } catch (e) { - return false; - } - }); -}; - -const getSubtype = (fieldName, record) => { - const item = JSON.parse(record[fieldName]); - - if (!item) { - 
return ' '; - } - - if (Array.isArray(item)) { - return 'array'; - } - - if (typeof item === 'object') { - return 'object'; - } - - return ' '; -}; - -const addKeyOptions = (jsonSchema, indexes) => { - const primaryIndexes = indexes.filter(index => getIndexType(index) === 'PRIMARY'); - const uniqueIndexes = indexes.filter(index => getIndexType(index) === 'UNIQUE'); - const { single } = uniqueIndexes.reduce(({single, composite, hash}, index) => { - const indexName = index['Key_name']; - if (!hash[indexName]) { - hash[indexName] = true; - - return { - single: single.concat(index), - composite, - hash, - }; - } else { - return { - single: single.filter(index => index['Key_name'] !== indexName), - composite: composite.concat(index), - hash, - }; - } - }, {composite: [], single: [], hash: {}}); - - jsonSchema = single.reduce((jsonSchema, index) => { - const columnName = index['Column_name']; - const uniqueKeyOptions = getIndexData(index); - - return { - ...jsonSchema, - properties: { - ...jsonSchema.properties, - [columnName]: { - ...(jsonSchema.properties[columnName] || {}), - uniqueKeyOptions: [ - ...((jsonSchema.properties[columnName] || {}).uniqueKeyOptions || []), - uniqueKeyOptions, - ], - } - } - }; - }, jsonSchema); - - if (primaryIndexes.length === 1) { - const primaryIndex = primaryIndexes[0]; - const columnName = primaryIndex['Column_name']; - const { constraintName, ...primaryKeyOptions } = getIndexData(primaryIndex); - - jsonSchema = { - ...jsonSchema, - properties: { - ...jsonSchema.properties, - [columnName]: { - ...(jsonSchema.properties[columnName] || {}), - primaryKeyOptions, - } - } - }; - } - - return jsonSchema; -}; - -const getIndexData = (index) => { - return { - constraintName: index['Key_name'], - indexCategory: getIndexCategory(index), - indexComment: index['Index_comment'], - indexOrder: getIndexOrder(index['Collation']), - indexIgnore: index['Ignored'] === 'YES' - }; -}; - -const getJsonSchema = ({ columns, constraints, records, indexes }) => 
{ - const properties = columns.filter((column) => { - return column['Type'] === 'longtext'; - }).reduce((schema, column) => { - const fieldName = column['Field']; - const record = findJsonRecord(fieldName, records); - const isJsonSynonym = isJson(fieldName, constraints); - const subtype = record ? getSubtype(fieldName, record) : ' '; - const synonym = isJsonSynonym ? 'json' : ''; - - if (!synonym && subtype === ' ') { - return schema; - } - - return { - ...schema, - [fieldName]: { - type: 'char', - mode: 'longtext', - synonym, - subtype, - } - }; - }, {}); - - return addKeyOptions({ - properties, - }, indexes); -}; - -const getIndexOrder = (collation) => { - if (collation === 'A') { - return 'ASC'; - } else if (collation === 'D') { - return 'DESC'; - } else { - return null; - } -}; - -const getIndexType = (index) => { - if (index['Key_name'] === 'PRIMARY') { - return 'PRIMARY'; - } else if (index['Index_type'] === 'FULLTEXT') { - return 'FULLTEXT'; - } else if (index['Index_type'] === 'SPATIAL') { - return 'SPATIAL'; - } else if (Number(index['Non_unique']) === 0) { - return 'UNIQUE'; - } else if (index['Index_type'] === 'KEY') { - return 'KEY'; - } else { - return ''; - } -}; - -const getIndexCategory = (index) => { - if (index['Index_type'] === 'BTREE') { - return 'BTREE'; - } else if (index['Index_type'] === 'HASH') { - return 'HASH'; - } else if (index['Index_type'] === 'RTREE') { - return 'RTREE'; - } else { - return ''; - } -}; - -const parseIndexes = (indexes) => { - const indexesByConstraint = indexes.filter(index => !['PRIMARY', 'UNIQUE'].includes(getIndexType(index))).reduce((result, index) => { - const constraintName = index['Key_name']; - - if (result[constraintName]) { - return { - ...result, - [constraintName]: { - ...result[constraintName], - indxKey: result[constraintName].indxKey.concat({ - name: index['Column_name'], - type: getIndexOrder(index['Collation']), - }), - }, - }; - } - - const indexData = { - indxName: constraintName, - indexType: 
getIndexType(index), - indexCategory: getIndexCategory(index), - indexComment: index['Index_comment'], - indxKey: [{ - name: index['Column_name'], - type: getIndexOrder(index['Collation']), - }], - }; - - return { - ...result, - [constraintName]: indexData, - }; - }, {}); - - return Object.values(indexesByConstraint); -}; - -module.exports = { - parseDatabaseStatement, - parseFunctions, - parseProcedures, - getJsonSchema, - parseIndexes, -}; diff --git a/reverse_engineering/helpers/parsers/functionHelper.js b/reverse_engineering/helpers/parsers/functionHelper.js deleted file mode 100644 index 301c1d5..0000000 --- a/reverse_engineering/helpers/parsers/functionHelper.js +++ /dev/null @@ -1,92 +0,0 @@ - -const parseFunctionQuery = (query) => { - const parseRegexp = /create(?\s+or\s+replace)?(?\s+definer\s*=[\s\S]+?)?(?\s+aggregate)?\s+function(?\s+if not exists)?\s+\`(?[\s\S]+?)\`\s*\((?[\s\S]*?)\)\s+returns\s+(?[a-z0-9\(\)]+)(?(\s*language\s+sql)?(\s*(not)?\s+deterministic)?(\s*contains\s+(sql|no\s+sql|reads\s+sql\s+data|modifies\s+sql\s+data))?(\s*sql\s+security\s+(definer|invoker))?(\s*comment\s+\'[\s\S]+?\')?(\s*charset\s+[\S\s]+?)?(\s*COLLATE\s+[\S\s]+?)?)?\s+(?(begin|return)([\s\S]+))/i; - - if (!parseRegexp.test(query)) { - return {}; - } - - const result = String(query).match(parseRegexp); - const { - orReplace, - definer, - aggregate, - ifNotExists, - funcName, - funcParameters, - returnType, - characteristics, - funcBody, - } = result.groups; - - return { - orReplace: Boolean(orReplace), - definer: definer, - isAggregate: Boolean(aggregate), - ifNotExists: Boolean(ifNotExists), - name: funcName, - parameters: funcParameters, - returnType: returnType, - characteristics: characteristics || '', - body: funcBody || '', - }; -}; - -const getLanguage = (characteristics) => { - return /language sql/i.test(characteristics) ? 
'SQL' : ''; -} - -const getDeterministic = (characteristics) => { - if (/not deterministic/i.test(characteristics)) { - return 'NOT DETERMINISTIC'; - } else if (/deterministic/i.test(characteristics)) { - return 'DETERMINISTIC'; - } else { - return ''; - } -}; - -const getContains = (characteristics) => { - if (/contains\s+sql/i.test(characteristics)) { - return 'SQL'; - } else if (/contains\s+no\s+sql/i.test(characteristics)) { - return 'NO SQL'; - } else if (/contains\s+reads\s+sql\s+data/i.test(characteristics)) { - return 'READS SQL DATA'; - } else if (/contains\s+modifies\s+sql\s+data/i.test(characteristics)) { - return 'MODIFIES SQL DATA'; - } else { - return ''; - } -}; - -const getDefiner = (characteristics) => { - if (/SQL\s+SECURITY\s+DEFINER/i.test(characteristics)) { - return 'DEFINER'; - } else if (/SQL\s+SECURITY\s+INVOKER/i.test(characteristics)) { - return 'INVOKER'; - } else { - return ''; - } -}; - -const getComment = (characteristics) => { - const commentRegexp = /comment\s\'([\s\S]+?)\'/i; - - if (!commentRegexp.test(characteristics)) { - return ''; - } - - const result = characteristics.match(commentRegexp); - - return result[1] || ''; -} - -module.exports = { - parseFunctionQuery, - getLanguage, - getDeterministic, - getContains, - getDefiner, - getComment, -}; diff --git a/reverse_engineering/helpers/parsers/procedureHelper.js b/reverse_engineering/helpers/parsers/procedureHelper.js deleted file mode 100644 index 11d7624..0000000 --- a/reverse_engineering/helpers/parsers/procedureHelper.js +++ /dev/null @@ -1,90 +0,0 @@ - -const isCreateOrReplace = (query) => { - return /^create\s+or\s+replace/i.test((String(query || '')).trim()); -}; - -const getBodyAndParameters = (query) => { - const regExp = /procedure\s+\`[\s\S]+?\`\s*\((?[\S\s]*)\)\s+(?begin[\S\s]+)/i; - - if (!regExp.test(query)) { - return { - body: '', - parameters: '', - }; - } - - const result = query.match(regExp); - - return { - parameters: (result.groups['parameters'] || 
'').trim(), - body: (result.groups['body'] || '').trim(), - }; -}; - -const findAndReplaceCharacteristics = (query) => { - const characteristics = { - language: /language\s+sql/i, - deterministic: /(not\s+)?deterministic/i, - contains: /contains\s+(sql|no\s+sql|reads\s+sql\s+data|modifies\s+sql\s+data)/i, - security: /sql\s+security\s+(definer|invoker)/i, - comment: /comment\s+\'[\s\S]+?\'/i - }; - - return Object.keys(characteristics).reduce(([query, result], characteristic) => { - const regExp = characteristics[characteristic]; - - if (!regExp.test(query)) { - return [query, result]; - } - - return [ - query.replace(regExp, ''), - { - ...result, - [characteristic]: query.match(regExp).shift(), - }, - ]; - }, [ - query, - {} - ]); -}; - -const getDeterministic = (characteristic) => { - if (!characteristic) { - return ''; - } - - if ( /not\s+deterministic/i.test(characteristic)) { - return 'NOT DETERMINISTIC'; - } else { - return 'DETERMINISTIC'; - } -}; - -const getContains = (characteristic) => { - if (!characteristic) { - return ''; - } - - const data = characteristic.replace(/\s+/g, ' ').replace(/contains\s+/, ''); - - return data.toUpperCase(); -}; - -const parseProcedure = (query) => { - const [noCharacteristicsQuery, characteristics] = findAndReplaceCharacteristics(String(query)); - const { body, parameters } = getBodyAndParameters(String(noCharacteristicsQuery)); - - return { - body, - parameters, - contains: getContains(characteristics.contains), - deterministic: getDeterministic(characteristics.deterministic), - orReplace: isCreateOrReplace(query), - }; -}; - -module.exports = { - parseProcedure, -}; diff --git a/reverse_engineering/node_modules/.bin/semver b/reverse_engineering/node_modules/.bin/semver new file mode 120000 index 0000000..317eb29 --- /dev/null +++ b/reverse_engineering/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver \ No newline at end of file diff --git a/reverse_engineering/node_modules/asn1/LICENSE 
b/reverse_engineering/node_modules/asn1/LICENSE new file mode 100644 index 0000000..9b5dcdb --- /dev/null +++ b/reverse_engineering/node_modules/asn1/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2011 Mark Cavage, All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE diff --git a/reverse_engineering/node_modules/asn1/README.md b/reverse_engineering/node_modules/asn1/README.md new file mode 100644 index 0000000..2208210 --- /dev/null +++ b/reverse_engineering/node_modules/asn1/README.md @@ -0,0 +1,50 @@ +node-asn1 is a library for encoding and decoding ASN.1 datatypes in pure JS. +Currently BER encoding is supported; at some point I'll likely have to do DER. + +## Usage + +Mostly, if you're *actually* needing to read and write ASN.1, you probably don't +need this readme to explain what and why. 
If you have no idea what ASN.1 is, +see this: ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +The source is pretty much self-explanatory, and has read/write methods for the +common types out there. + +### Decoding + +The following reads an ASN.1 sequence with a boolean. + + var Ber = require('asn1').Ber; + + var reader = new Ber.Reader(Buffer.from([0x30, 0x03, 0x01, 0x01, 0xff])); + + reader.readSequence(); + console.log('Sequence len: ' + reader.length); + if (reader.peek() === Ber.Boolean) + console.log(reader.readBoolean()); + +### Encoding + +The following generates the same payload as above. + + var Ber = require('asn1').Ber; + + var writer = new Ber.Writer(); + + writer.startSequence(); + writer.writeBoolean(true); + writer.endSequence(); + + console.log(writer.buffer); + +## Installation + + npm install asn1 + +## License + +MIT. + +## Bugs + +See . diff --git a/reverse_engineering/node_modules/asn1/lib/ber/errors.js b/reverse_engineering/node_modules/asn1/lib/ber/errors.js new file mode 100644 index 0000000..4557b8a --- /dev/null +++ b/reverse_engineering/node_modules/asn1/lib/ber/errors.js @@ -0,0 +1,13 @@ +// Copyright 2011 Mark Cavage All rights reserved. + + +module.exports = { + + newInvalidAsn1Error: function (msg) { + var e = new Error(); + e.name = 'InvalidAsn1Error'; + e.message = msg || ''; + return e; + } + +}; diff --git a/reverse_engineering/node_modules/asn1/lib/ber/index.js b/reverse_engineering/node_modules/asn1/lib/ber/index.js new file mode 100644 index 0000000..387d132 --- /dev/null +++ b/reverse_engineering/node_modules/asn1/lib/ber/index.js @@ -0,0 +1,27 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +var errors = require('./errors'); +var types = require('./types'); + +var Reader = require('./reader'); +var Writer = require('./writer'); + + +// --- Exports + +module.exports = { + + Reader: Reader, + + Writer: Writer + +}; + +for (var t in types) { + if (types.hasOwnProperty(t)) + module.exports[t] = types[t]; +} +for (var e in errors) { + if (errors.hasOwnProperty(e)) + module.exports[e] = errors[e]; +} diff --git a/reverse_engineering/node_modules/asn1/lib/ber/reader.js b/reverse_engineering/node_modules/asn1/lib/ber/reader.js new file mode 100644 index 0000000..8a7e4ca --- /dev/null +++ b/reverse_engineering/node_modules/asn1/lib/ber/reader.js @@ -0,0 +1,262 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +var assert = require('assert'); +var Buffer = require('safer-buffer').Buffer; + +var ASN1 = require('./types'); +var errors = require('./errors'); + + +// --- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + + + +// --- API + +function Reader(data) { + if (!data || !Buffer.isBuffer(data)) + throw new TypeError('data must be a node Buffer'); + + this._buf = data; + this._size = data.length; + + // These hold the "current" state + this._len = 0; + this._offset = 0; +} + +Object.defineProperty(Reader.prototype, 'length', { + enumerable: true, + get: function () { return (this._len); } +}); + +Object.defineProperty(Reader.prototype, 'offset', { + enumerable: true, + get: function () { return (this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'remain', { + get: function () { return (this._size - this._offset); } +}); + +Object.defineProperty(Reader.prototype, 'buffer', { + get: function () { return (this._buf.slice(this._offset)); } +}); + + +/** + * Reads a single byte and advances offset; you can pass in `true` to make this + * a "peek" operation (i.e., get the byte, but don't advance the offset). + * + * @param {Boolean} peek true means don't move offset. + * @return {Number} the next byte, null if not enough data. 
+ */ +Reader.prototype.readByte = function (peek) { + if (this._size - this._offset < 1) + return null; + + var b = this._buf[this._offset] & 0xff; + + if (!peek) + this._offset += 1; + + return b; +}; + + +Reader.prototype.peek = function () { + return this.readByte(true); +}; + + +/** + * Reads a (potentially) variable length off the BER buffer. This call is + * not really meant to be called directly, as callers have to manipulate + * the internal buffer afterwards. + * + * As a result of this call, you can call `Reader.length`, until the + * next thing called that does a readLength. + * + * @return {Number} the amount of offset to advance the buffer. + * @throws {InvalidAsn1Error} on bad ASN.1 + */ +Reader.prototype.readLength = function (offset) { + if (offset === undefined) + offset = this._offset; + + if (offset >= this._size) + return null; + + var lenB = this._buf[offset++] & 0xff; + if (lenB === null) + return null; + + if ((lenB & 0x80) === 0x80) { + lenB &= 0x7f; + + if (lenB === 0) + throw newInvalidAsn1Error('Indefinite length not supported'); + + if (lenB > 4) + throw newInvalidAsn1Error('encoding too long'); + + if (this._size - offset < lenB) + return null; + + this._len = 0; + for (var i = 0; i < lenB; i++) + this._len = (this._len << 8) + (this._buf[offset++] & 0xff); + + } else { + // Wasn't a variable length + this._len = lenB; + } + + return offset; +}; + + +/** + * Parses the next sequence in this BER buffer. + * + * To get the length of the sequence, call `Reader.length`. + * + * @return {Number} the sequence's tag. 
+ */ +Reader.prototype.readSequence = function (tag) { + var seq = this.peek(); + if (seq === null) + return null; + if (tag !== undefined && tag !== seq) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + seq.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + this._offset = o; + return seq; +}; + + +Reader.prototype.readInt = function () { + return this._readTag(ASN1.Integer); +}; + + +Reader.prototype.readBoolean = function () { + return (this._readTag(ASN1.Boolean) === 0 ? false : true); +}; + + +Reader.prototype.readEnumeration = function () { + return this._readTag(ASN1.Enumeration); +}; + + +Reader.prototype.readString = function (tag, retbuf) { + if (!tag) + tag = ASN1.OctetString; + + var b = this.peek(); + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + + if (o === null) + return null; + + if (this.length > this._size - o) + return null; + + this._offset = o; + + if (this.length === 0) + return retbuf ? Buffer.alloc(0) : ''; + + var str = this._buf.slice(this._offset, this._offset + this.length); + this._offset += this.length; + + return retbuf ? 
str : str.toString('utf8'); +}; + +Reader.prototype.readOID = function (tag) { + if (!tag) + tag = ASN1.OID; + + var b = this.readString(tag, true); + if (b === null) + return null; + + var values = []; + var value = 0; + + for (var i = 0; i < b.length; i++) { + var byte = b[i] & 0xff; + + value <<= 7; + value += byte & 0x7f; + if ((byte & 0x80) === 0) { + values.push(value); + value = 0; + } + } + + value = values.shift(); + values.unshift(value % 40); + values.unshift((value / 40) >> 0); + + return values.join('.'); +}; + + +Reader.prototype._readTag = function (tag) { + assert.ok(tag !== undefined); + + var b = this.peek(); + + if (b === null) + return null; + + if (b !== tag) + throw newInvalidAsn1Error('Expected 0x' + tag.toString(16) + + ': got 0x' + b.toString(16)); + + var o = this.readLength(this._offset + 1); // stored in `length` + if (o === null) + return null; + + if (this.length > 4) + throw newInvalidAsn1Error('Integer too long: ' + this.length); + + if (this.length > this._size - o) + return null; + this._offset = o; + + var fb = this._buf[this._offset]; + var value = 0; + + for (var i = 0; i < this.length; i++) { + value <<= 8; + value |= (this._buf[this._offset++] & 0xff); + } + + if ((fb & 0x80) === 0x80 && i !== 4) + value -= (1 << (i * 8)); + + return value >> 0; +}; + + + +// --- Exported API + +module.exports = Reader; diff --git a/reverse_engineering/node_modules/asn1/lib/ber/types.js b/reverse_engineering/node_modules/asn1/lib/ber/types.js new file mode 100644 index 0000000..8aea000 --- /dev/null +++ b/reverse_engineering/node_modules/asn1/lib/ber/types.js @@ -0,0 +1,36 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ + +module.exports = { + EOC: 0, + Boolean: 1, + Integer: 2, + BitString: 3, + OctetString: 4, + Null: 5, + OID: 6, + ObjectDescriptor: 7, + External: 8, + Real: 9, // float + Enumeration: 10, + PDV: 11, + Utf8String: 12, + RelativeOID: 13, + Sequence: 16, + Set: 17, + NumericString: 18, + PrintableString: 19, + T61String: 20, + VideotexString: 21, + IA5String: 22, + UTCTime: 23, + GeneralizedTime: 24, + GraphicString: 25, + VisibleString: 26, + GeneralString: 28, + UniversalString: 29, + CharacterString: 30, + BMPString: 31, + Constructor: 32, + Context: 128 +}; diff --git a/reverse_engineering/node_modules/asn1/lib/ber/writer.js b/reverse_engineering/node_modules/asn1/lib/ber/writer.js new file mode 100644 index 0000000..3515acf --- /dev/null +++ b/reverse_engineering/node_modules/asn1/lib/ber/writer.js @@ -0,0 +1,317 @@ +// Copyright 2011 Mark Cavage All rights reserved. + +var assert = require('assert'); +var Buffer = require('safer-buffer').Buffer; +var ASN1 = require('./types'); +var errors = require('./errors'); + + +// --- Globals + +var newInvalidAsn1Error = errors.newInvalidAsn1Error; + +var DEFAULT_OPTS = { + size: 1024, + growthFactor: 8 +}; + + +// --- Helpers + +function merge(from, to) { + assert.ok(from); + assert.equal(typeof (from), 'object'); + assert.ok(to); + assert.equal(typeof (to), 'object'); + + var keys = Object.getOwnPropertyNames(from); + keys.forEach(function (key) { + if (to[key]) + return; + + var value = Object.getOwnPropertyDescriptor(from, key); + Object.defineProperty(to, key, value); + }); + + return to; +} + + + +// --- API + +function Writer(options) { + options = merge(DEFAULT_OPTS, options || {}); + + this._buf = Buffer.alloc(options.size || 1024); + this._size = this._buf.length; + this._offset = 0; + this._options = options; + + // A list of offsets in the buffer where we need to insert + // sequence tag/len pairs. 
+ this._seq = []; +} + +Object.defineProperty(Writer.prototype, 'buffer', { + get: function () { + if (this._seq.length) + throw newInvalidAsn1Error(this._seq.length + ' unended sequence(s)'); + + return (this._buf.slice(0, this._offset)); + } +}); + +Writer.prototype.writeByte = function (b) { + if (typeof (b) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(1); + this._buf[this._offset++] = b; +}; + + +Writer.prototype.writeInt = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Integer; + + var sz = 4; + + while ((((i & 0xff800000) === 0) || ((i & 0xff800000) === 0xff800000 >> 0)) && + (sz > 1)) { + sz--; + i <<= 8; + } + + if (sz > 4) + throw newInvalidAsn1Error('BER ints cannot be > 0xffffffff'); + + this._ensure(2 + sz); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = sz; + + while (sz-- > 0) { + this._buf[this._offset++] = ((i & 0xff000000) >>> 24); + i <<= 8; + } + +}; + + +Writer.prototype.writeNull = function () { + this.writeByte(ASN1.Null); + this.writeByte(0x00); +}; + + +Writer.prototype.writeEnumeration = function (i, tag) { + if (typeof (i) !== 'number') + throw new TypeError('argument must be a Number'); + if (typeof (tag) !== 'number') + tag = ASN1.Enumeration; + + return this.writeInt(i, tag); +}; + + +Writer.prototype.writeBoolean = function (b, tag) { + if (typeof (b) !== 'boolean') + throw new TypeError('argument must be a Boolean'); + if (typeof (tag) !== 'number') + tag = ASN1.Boolean; + + this._ensure(3); + this._buf[this._offset++] = tag; + this._buf[this._offset++] = 0x01; + this._buf[this._offset++] = b ? 
0xff : 0x00; +}; + + +Writer.prototype.writeString = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string (was: ' + typeof (s) + ')'); + if (typeof (tag) !== 'number') + tag = ASN1.OctetString; + + var len = Buffer.byteLength(s); + this.writeByte(tag); + this.writeLength(len); + if (len) { + this._ensure(len); + this._buf.write(s, this._offset); + this._offset += len; + } +}; + + +Writer.prototype.writeBuffer = function (buf, tag) { + if (typeof (tag) !== 'number') + throw new TypeError('tag must be a number'); + if (!Buffer.isBuffer(buf)) + throw new TypeError('argument must be a buffer'); + + this.writeByte(tag); + this.writeLength(buf.length); + this._ensure(buf.length); + buf.copy(this._buf, this._offset, 0, buf.length); + this._offset += buf.length; +}; + + +Writer.prototype.writeStringArray = function (strings) { + if ((!strings instanceof Array)) + throw new TypeError('argument must be an Array[String]'); + + var self = this; + strings.forEach(function (s) { + self.writeString(s); + }); +}; + +// This is really to solve DER cases, but whatever for now +Writer.prototype.writeOID = function (s, tag) { + if (typeof (s) !== 'string') + throw new TypeError('argument must be a string'); + if (typeof (tag) !== 'number') + tag = ASN1.OID; + + if (!/^([0-9]+\.){3,}[0-9]+$/.test(s)) + throw new Error('argument is not a valid OID string'); + + function encodeOctet(bytes, octet) { + if (octet < 128) { + bytes.push(octet); + } else if (octet < 16384) { + bytes.push((octet >>> 7) | 0x80); + bytes.push(octet & 0x7F); + } else if (octet < 2097152) { + bytes.push((octet >>> 14) | 0x80); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else if (octet < 268435456) { + bytes.push((octet >>> 21) | 0x80); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } else { + bytes.push(((octet >>> 28) | 0x80) & 0xFF); + bytes.push(((octet >>> 21) 
| 0x80) & 0xFF); + bytes.push(((octet >>> 14) | 0x80) & 0xFF); + bytes.push(((octet >>> 7) | 0x80) & 0xFF); + bytes.push(octet & 0x7F); + } + } + + var tmp = s.split('.'); + var bytes = []; + bytes.push(parseInt(tmp[0], 10) * 40 + parseInt(tmp[1], 10)); + tmp.slice(2).forEach(function (b) { + encodeOctet(bytes, parseInt(b, 10)); + }); + + var self = this; + this._ensure(2 + bytes.length); + this.writeByte(tag); + this.writeLength(bytes.length); + bytes.forEach(function (b) { + self.writeByte(b); + }); +}; + + +Writer.prototype.writeLength = function (len) { + if (typeof (len) !== 'number') + throw new TypeError('argument must be a Number'); + + this._ensure(4); + + if (len <= 0x7f) { + this._buf[this._offset++] = len; + } else if (len <= 0xff) { + this._buf[this._offset++] = 0x81; + this._buf[this._offset++] = len; + } else if (len <= 0xffff) { + this._buf[this._offset++] = 0x82; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else if (len <= 0xffffff) { + this._buf[this._offset++] = 0x83; + this._buf[this._offset++] = len >> 16; + this._buf[this._offset++] = len >> 8; + this._buf[this._offset++] = len; + } else { + throw newInvalidAsn1Error('Length too long (> 4 bytes)'); + } +}; + +Writer.prototype.startSequence = function (tag) { + if (typeof (tag) !== 'number') + tag = ASN1.Sequence | ASN1.Constructor; + + this.writeByte(tag); + this._seq.push(this._offset); + this._ensure(3); + this._offset += 3; +}; + + +Writer.prototype.endSequence = function () { + var seq = this._seq.pop(); + var start = seq + 3; + var len = this._offset - start; + + if (len <= 0x7f) { + this._shift(start, len, -2); + this._buf[seq] = len; + } else if (len <= 0xff) { + this._shift(start, len, -1); + this._buf[seq] = 0x81; + this._buf[seq + 1] = len; + } else if (len <= 0xffff) { + this._buf[seq] = 0x82; + this._buf[seq + 1] = len >> 8; + this._buf[seq + 2] = len; + } else if (len <= 0xffffff) { + this._shift(start, len, 1); + this._buf[seq] = 0x83; + 
this._buf[seq + 1] = len >> 16; + this._buf[seq + 2] = len >> 8; + this._buf[seq + 3] = len; + } else { + throw newInvalidAsn1Error('Sequence too long'); + } +}; + + +Writer.prototype._shift = function (start, len, shift) { + assert.ok(start !== undefined); + assert.ok(len !== undefined); + assert.ok(shift); + + this._buf.copy(this._buf, start + shift, start, start + len); + this._offset += shift; +}; + +Writer.prototype._ensure = function (len) { + assert.ok(len); + + if (this._size - this._offset < len) { + var sz = this._size * this._options.growthFactor; + if (sz - this._offset < len) + sz += len; + + var buf = Buffer.alloc(sz); + + this._buf.copy(buf, 0, 0, this._offset); + this._buf = buf; + this._size = sz; + } +}; + + + +// --- Exported API + +module.exports = Writer; diff --git a/reverse_engineering/node_modules/asn1/lib/index.js b/reverse_engineering/node_modules/asn1/lib/index.js new file mode 100644 index 0000000..ede3ab2 --- /dev/null +++ b/reverse_engineering/node_modules/asn1/lib/index.js @@ -0,0 +1,20 @@ +// Copyright 2011 Mark Cavage All rights reserved. 
+ +// If you have no idea what ASN.1 or BER is, see this: +// ftp://ftp.rsa.com/pub/pkcs/ascii/layman.asc + +var Ber = require('./ber/index'); + + + +// --- Exported API + +module.exports = { + + Ber: Ber, + + BerReader: Ber.Reader, + + BerWriter: Ber.Writer + +}; diff --git a/reverse_engineering/node_modules/asn1/package.json b/reverse_engineering/node_modules/asn1/package.json new file mode 100644 index 0000000..e04eb7f --- /dev/null +++ b/reverse_engineering/node_modules/asn1/package.json @@ -0,0 +1,78 @@ +{ + "_args": [ + [ + "asn1@0.2.4", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "asn1@0.2.4", + "_id": "asn1@0.2.4", + "_inBundle": false, + "_integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "_location": "/asn1", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "asn1@0.2.4", + "name": "asn1", + "escapedName": "asn1", + "rawSpec": "0.2.4", + "saveSpec": null, + "fetchSpec": "0.2.4" + }, + "_requiredBy": [ + "/ssh2-streams" + ], + "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "_spec": "0.2.4", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "Joyent", + "url": "joyent.com" + }, + "bugs": { + "url": "https://github.com/joyent/node-asn1/issues" + }, + "contributors": [ + { + "name": "Mark Cavage", + "email": "mcavage@gmail.com" + }, + { + "name": "David Gwynne", + "email": "loki@animata.net" + }, + { + "name": "Yunong Xiao", + "email": "yunong@joyent.com" + }, + { + "name": "Alex Wilson", + "email": "alex.wilson@joyent.com" + } + ], + "dependencies": { + "safer-buffer": "~2.1.0" + }, + "description": "Contains parsers and serializers for ASN.1 (currently BER only)", + "devDependencies": { + "eslint": "2.13.1", + "eslint-plugin-joyent": "~1.3.0", + "faucet": "0.0.1", + "istanbul": "^0.3.6", + "tape": "^3.5.0" + }, + "homepage": 
"https://github.com/joyent/node-asn1#readme", + "license": "MIT", + "main": "lib/index.js", + "name": "asn1", + "repository": { + "type": "git", + "url": "git://github.com/joyent/node-asn1.git" + }, + "scripts": { + "test": "tape ./test/ber/*.test.js" + }, + "version": "0.2.4" +} diff --git a/reverse_engineering/node_modules/buffer-writer/.travis.yml b/reverse_engineering/node_modules/buffer-writer/.travis.yml new file mode 100644 index 0000000..8e59bb3 --- /dev/null +++ b/reverse_engineering/node_modules/buffer-writer/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - 4 + - 6 + - 8 + - 10 + - 11 diff --git a/reverse_engineering/node_modules/buffer-writer/LICENSE b/reverse_engineering/node_modules/buffer-writer/LICENSE new file mode 100644 index 0000000..72dc60d --- /dev/null +++ b/reverse_engineering/node_modules/buffer-writer/LICENSE @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/reverse_engineering/node_modules/buffer-writer/README.md b/reverse_engineering/node_modules/buffer-writer/README.md new file mode 100644 index 0000000..81eccc0 --- /dev/null +++ b/reverse_engineering/node_modules/buffer-writer/README.md @@ -0,0 +1,48 @@ +# buffer-writer + +[![Build Status](https://secure.travis-ci.org/brianc/node-buffer-writer.png?branch=master)](http://travis-ci.org/brianc/node-buffer-writer) + +Fast & efficient buffer writer used to keep memory usage low by internally recycling a single large buffer. + +Used as the binary protocol writer in [node-postgres](https://github.com/brianc/node-postgres) + +Since postgres requires big endian encoding, this only writes big endian numbers for now, but can & probably will easily be extended to write little endian as well. + +I'll admit this has a few postgres specific things I might need to take out in the future, such as `addHeader` + +## api + +`var writer = new (require('buffer-writer')());` + +### writer.addInt32(num) + +Writes a 4-byte big endian binary encoded number to the end of the buffer. + +### writer.addInt16(num) + +Writes a 2-byte big endian binary encoded number to the end of the buffer. + +### writer.addCString(string) + +Writes a string to the buffer `utf8` encoded and adds a null character (`\0`) at the end. + +### var buffer = writer.addHeader(char) + +Writes the 5 byte PostgreSQL required header to the beginning of the buffer. (1 byte for character, 1 BE Int32 for length of the buffer) + +### var buffer = writer.join() + +Collects all data in the writer and joins it into a single, new buffer. + +### var buffer = writer.flush(char) + +Writes the 5 byte postgres required message header, collects all data in the writer and joins it into a single, new buffer, and then resets the writer. + +## thoughts + +This is kind of node-postgres specific. If you're interested in using this for a more general purpose thing, lemme know. 
+I would love to work with you on getting this more reusable for your needs. + +## license + +MIT diff --git a/reverse_engineering/node_modules/buffer-writer/index.js b/reverse_engineering/node_modules/buffer-writer/index.js new file mode 100644 index 0000000..f3c119e --- /dev/null +++ b/reverse_engineering/node_modules/buffer-writer/index.js @@ -0,0 +1,129 @@ +//binary data writer tuned for creating +//postgres message packets as effeciently as possible by reusing the +//same buffer to avoid memcpy and limit memory allocations +var Writer = module.exports = function (size) { + this.size = size || 1024; + this.buffer = Buffer.alloc(this.size + 5); + this.offset = 5; + this.headerPosition = 0; +}; + +//resizes internal buffer if not enough size left +Writer.prototype._ensure = function (size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.alloc(newSize); + oldBuffer.copy(this.buffer); + } +}; + +Writer.prototype.addInt32 = function (num) { + this._ensure(4); + this.buffer[this.offset++] = (num >>> 24 & 0xFF); + this.buffer[this.offset++] = (num >>> 16 & 0xFF); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; +}; + +Writer.prototype.addInt16 = function (num) { + this._ensure(2); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; +}; + +//for versions of node requiring 'length' as 3rd argument to buffer.write +var writeString = function (buffer, string, offset, len) { + buffer.write(string, offset, len); +}; + +//overwrite function for older versions of node +if (Buffer.prototype.write.length === 3) { + writeString = function (buffer, string, offset, len) { + 
buffer.write(string, offset); + }; +} + +Writer.prototype.addCString = function (string) { + //just write a 0 for empty or null strings + if (!string) { + this._ensure(1); + } else { + var len = Buffer.byteLength(string); + this._ensure(len + 1); //+1 for null terminator + writeString(this.buffer, string, this.offset, len); + this.offset += len; + } + + this.buffer[this.offset++] = 0; // null terminator + return this; +}; + +Writer.prototype.addChar = function (c) { + this._ensure(1); + writeString(this.buffer, c, this.offset, 1); + this.offset++; + return this; +}; + +Writer.prototype.addString = function (string) { + string = string || ""; + var len = Buffer.byteLength(string); + this._ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; +}; + +Writer.prototype.getByteLength = function () { + return this.offset - 5; +}; + +Writer.prototype.add = function (otherBuffer) { + this._ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; +}; + +Writer.prototype.clear = function () { + this.offset = 5; + this.headerPosition = 0; + this.lastEnd = 0; +}; + +//appends a header block to all the written data since the last +//subsequent header or to the beginning if there is only one data block +Writer.prototype.addHeader = function (code, last) { + var origOffset = this.offset; + this.offset = this.headerPosition; + this.buffer[this.offset++] = code; + //length is everything in this packet minus the code + this.addInt32(origOffset - (this.headerPosition + 1)); + //set next header position + this.headerPosition = origOffset; + //make space for next header + this.offset = origOffset; + if (!last) { + this._ensure(5); + this.offset += 5; + } +}; + +Writer.prototype.join = function (code) { + if (code) { + this.addHeader(code, true); + } + return this.buffer.slice(code ? 
0 : 5, this.offset); +}; + +Writer.prototype.flush = function (code) { + var result = this.join(code); + this.clear(); + return result; +}; diff --git a/reverse_engineering/node_modules/buffer-writer/package.json b/reverse_engineering/node_modules/buffer-writer/package.json new file mode 100644 index 0000000..dc11b4b --- /dev/null +++ b/reverse_engineering/node_modules/buffer-writer/package.json @@ -0,0 +1,57 @@ +{ + "_from": "buffer-writer@2.0.0", + "_id": "buffer-writer@2.0.0", + "_inBundle": false, + "_integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "_location": "/buffer-writer", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "buffer-writer@2.0.0", + "name": "buffer-writer", + "escapedName": "buffer-writer", + "rawSpec": "2.0.0", + "saveSpec": null, + "fetchSpec": "2.0.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "_shasum": "ce7eb81a38f7829db09c873f2fbb792c0c98ec04", + "_spec": "buffer-writer@2.0.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", + "author": { + "name": "Brian M. 
Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-buffer-writer/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "a fast, efficient buffer writer", + "devDependencies": { + "mocha": "5.2.0" + }, + "engines": { + "node": ">=4" + }, + "homepage": "https://github.com/brianc/node-buffer-writer#readme", + "keywords": [ + "buffer", + "writer", + "builder" + ], + "license": "MIT", + "main": "index.js", + "name": "buffer-writer", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-buffer-writer.git" + }, + "scripts": { + "test": "mocha --throw-deprecation" + }, + "version": "2.0.0" +} diff --git a/reverse_engineering/node_modules/buffer-writer/test/mocha.opts b/reverse_engineering/node_modules/buffer-writer/test/mocha.opts new file mode 100644 index 0000000..5efaf24 --- /dev/null +++ b/reverse_engineering/node_modules/buffer-writer/test/mocha.opts @@ -0,0 +1 @@ +--ui tdd diff --git a/reverse_engineering/node_modules/buffer-writer/test/writer-tests.js b/reverse_engineering/node_modules/buffer-writer/test/writer-tests.js new file mode 100644 index 0000000..ded91c8 --- /dev/null +++ b/reverse_engineering/node_modules/buffer-writer/test/writer-tests.js @@ -0,0 +1,218 @@ +var Writer = require(__dirname + "/../"); + +var assert = require('assert'); +var util = require('util'); + +assert.equalBuffers = function (actual, expected) { + var spit = function (actual, expected) { + console.log(""); + console.log("actual " + util.inspect(actual)); + console.log("expect " + util.inspect(expected)); + console.log(""); + }; + if (actual.length != expected.length) { + spit(actual, expected); + assert.strictEqual(actual.length, expected.length); + } + for (var i = 0; i < actual.length; i++) { + if (actual[i] != expected[i]) { + spit(actual, expected); + } + assert.strictEqual(actual[i], expected[i]); + } +}; + +suite('adding int32', function () { + var testAddingInt32 = function (int, expectedBuffer) { + test('writes ' 
+ int, function () { + var subject = new Writer(); + var result = subject.addInt32(int).join(); + assert.equalBuffers(result, expectedBuffer); + }); + }; + + testAddingInt32(0, [0, 0, 0, 0]); + testAddingInt32(1, [0, 0, 0, 1]); + testAddingInt32(256, [0, 0, 1, 0]); + test('writes largest int32', function () { + //todo need to find largest int32 when I have internet access + return false; + }); + + test('writing multiple int32s', function () { + var subject = new Writer(); + var result = subject.addInt32(1).addInt32(10).addInt32(0).join(); + assert.equalBuffers(result, [0, 0, 0, 1, 0, 0, 0, 0x0a, 0, 0, 0, 0]); + }); + + suite('having to resize the buffer', function () { + test('after resize correct result returned', function () { + var subject = new Writer(10); + subject.addInt32(1).addInt32(1).addInt32(1); + assert.equalBuffers(subject.join(), [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1]); + }); + }); +}); + +suite('int16', function () { + test('writes 0', function () { + var subject = new Writer(); + var result = subject.addInt16(0).join(); + assert.equalBuffers(result, [0, 0]); + }); + + test('writes 400', function () { + var subject = new Writer(); + var result = subject.addInt16(400).join(); + assert.equalBuffers(result, [1, 0x90]); + }); + + test('writes many', function () { + var subject = new Writer(); + var result = subject.addInt16(0).addInt16(1).addInt16(2).join(); + assert.equalBuffers(result, [0, 0, 0, 1, 0, 2]); + }); + + test('resizes if internal buffer fills up', function () { + var subject = new Writer(3); + var result = subject.addInt16(2).addInt16(3).join(); + assert.equalBuffers(result, [0, 2, 0, 3]); + }); + +}); + +suite('cString', function () { + test('writes empty cstring', function () { + var subject = new Writer(); + var result = subject.addCString().join(); + assert.equalBuffers(result, [0]); + }); + + test('writes two empty cstrings', function () { + var subject = new Writer(); + var result = subject.addCString("").addCString("").join(); + 
assert.equalBuffers(result, [0, 0]); + }); + + + test('writes non-empty cstring', function () { + var subject = new Writer(); + var result = subject.addCString("!!!").join(); + assert.equalBuffers(result, [33, 33, 33, 0]); + }); + + test('resizes if reached end', function () { + var subject = new Writer(3); + var result = subject.addCString("!!!").join(); + assert.equalBuffers(result, [33, 33, 33, 0]); + }); + + test('writes multiple cstrings', function () { + var subject = new Writer(); + var result = subject.addCString("!").addCString("!").join(); + assert.equalBuffers(result, [33, 0, 33, 0]); + }); + +}); + +test('writes char', function () { + var subject = new Writer(2); + var result = subject.addChar('a').addChar('b').addChar('c').join(); + assert.equalBuffers(result, [0x61, 0x62, 0x63]); +}); + +test('gets correct byte length', function () { + var subject = new Writer(5); + assert.strictEqual(subject.getByteLength(), 0); + subject.addInt32(0); + assert.strictEqual(subject.getByteLength(), 4); + subject.addCString("!"); + assert.strictEqual(subject.getByteLength(), 6); +}); + +test('can add arbitrary buffer to the end', function () { + var subject = new Writer(4); + subject.addCString("!!!") + var result = subject.add(Buffer.from("@@@")).join(); + assert.equalBuffers(result, [33, 33, 33, 0, 0x40, 0x40, 0x40]); +}); + +suite('can write normal string', function () { + var subject = new Writer(4); + var result = subject.addString("!").join(); + assert.equalBuffers(result, [33]); + test('can write cString too', function () { + var result = subject.addCString("!").join(); + assert.equalBuffers(result, [33, 33, 0]); + }); + test('can resize', function () { + var result = subject.addString("!!").join(); + assert.equalBuffers(result, [33, 33, 0, 33, 33]); + }); +}); + + +suite('clearing', function () { + var subject = new Writer(); + subject.addCString("@!!#!#"); + subject.addInt32(10401); + test('clears', function () { + subject.clear(); + 
assert.equalBuffers(subject.join(), []); + }); + test('writing more', function () { + var joinedResult = subject.addCString("!").addInt32(9).addInt16(2).join(); + assert.equalBuffers(joinedResult, [33, 0, 0, 0, 0, 9, 0, 2]); + }); + test('returns result', function () { + var flushedResult = subject.flush(); + assert.equalBuffers(flushedResult, [33, 0, 0, 0, 0, 9, 0, 2]) + }); + test('clears the writer', function () { + assert.equalBuffers(subject.join(), []) + assert.equalBuffers(subject.flush(), []) + }); +}); + +test("resizing to much larger", function () { + var subject = new Writer(2); + var string = "!!!!!!!!"; + var result = subject.addCString(string).flush(); + assert.equalBuffers(result, [33, 33, 33, 33, 33, 33, 33, 33, 0]); +}); + +suite("flush", function () { + test('added as a hex code to a full writer', function () { + var subject = new Writer(2); + var result = subject.addCString("!").flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); + }); + + test('added as a hex code to a non-full writer', function () { + var subject = new Writer(10).addCString("!"); + var joinedResult = subject.join(0x50); + var result = subject.flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); + }); + + test('added as a hex code to a buffer which requires resizing', function () { + var result = new Writer(2).addCString("!!!!!!!!").flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 0x0D, 33, 33, 33, 33, 33, 33, 33, 33, 0]); + }); +}); + +suite("header", function () { + test('adding two packets with headers', function () { + var subject = new Writer(10).addCString("!"); + subject.addHeader(0x50); + subject.addCString("!!"); + subject.addHeader(0x40); + subject.addCString("!"); + var result = subject.flush(0x10); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0, 0x40, 0, 0, 0, 7, 33, 33, 0, 0x10, 0, 0, 0, 6, 33, 0]); + }); +}); + + + + diff --git a/reverse_engineering/node_modules/debug/.coveralls.yml 
b/reverse_engineering/node_modules/debug/.coveralls.yml new file mode 100644 index 0000000..20a7068 --- /dev/null +++ b/reverse_engineering/node_modules/debug/.coveralls.yml @@ -0,0 +1 @@ +repo_token: SIAeZjKYlHK74rbcFvNHMUzjRiMpflxve diff --git a/reverse_engineering/node_modules/debug/.eslintrc b/reverse_engineering/node_modules/debug/.eslintrc new file mode 100644 index 0000000..8a37ae2 --- /dev/null +++ b/reverse_engineering/node_modules/debug/.eslintrc @@ -0,0 +1,11 @@ +{ + "env": { + "browser": true, + "node": true + }, + "rules": { + "no-console": 0, + "no-empty": [1, { "allowEmptyCatch": true }] + }, + "extends": "eslint:recommended" +} diff --git a/reverse_engineering/node_modules/debug/.npmignore b/reverse_engineering/node_modules/debug/.npmignore new file mode 100644 index 0000000..5f60eec --- /dev/null +++ b/reverse_engineering/node_modules/debug/.npmignore @@ -0,0 +1,9 @@ +support +test +examples +example +*.sock +dist +yarn.lock +coverage +bower.json diff --git a/reverse_engineering/node_modules/debug/.travis.yml b/reverse_engineering/node_modules/debug/.travis.yml new file mode 100644 index 0000000..6c6090c --- /dev/null +++ b/reverse_engineering/node_modules/debug/.travis.yml @@ -0,0 +1,14 @@ + +language: node_js +node_js: + - "6" + - "5" + - "4" + +install: + - make node_modules + +script: + - make lint + - make test + - make coveralls diff --git a/reverse_engineering/node_modules/debug/CHANGELOG.md b/reverse_engineering/node_modules/debug/CHANGELOG.md new file mode 100644 index 0000000..eadaa18 --- /dev/null +++ b/reverse_engineering/node_modules/debug/CHANGELOG.md @@ -0,0 +1,362 @@ + +2.6.9 / 2017-09-22 +================== + + * remove ReDoS regexp in %o formatter (#504) + +2.6.8 / 2017-05-18 +================== + + * Fix: Check for undefined on browser globals (#462, @marbemac) + +2.6.7 / 2017-05-16 +================== + + * Fix: Update ms to 2.0.0 to fix regular expression denial of service vulnerability (#458, @hubdotcom) + * Fix: Inline extend 
function in node implementation (#452, @dougwilson) + * Docs: Fix typo (#455, @msasad) + +2.6.5 / 2017-04-27 +================== + + * Fix: null reference check on window.documentElement.style.WebkitAppearance (#447, @thebigredgeek) + * Misc: clean up browser reference checks (#447, @thebigredgeek) + * Misc: add npm-debug.log to .gitignore (@thebigredgeek) + + +2.6.4 / 2017-04-20 +================== + + * Fix: bug that would occure if process.env.DEBUG is a non-string value. (#444, @LucianBuzzo) + * Chore: ignore bower.json in npm installations. (#437, @joaovieira) + * Misc: update "ms" to v0.7.3 (@tootallnate) + +2.6.3 / 2017-03-13 +================== + + * Fix: Electron reference to `process.env.DEBUG` (#431, @paulcbetts) + * Docs: Changelog fix (@thebigredgeek) + +2.6.2 / 2017-03-10 +================== + + * Fix: DEBUG_MAX_ARRAY_LENGTH (#420, @slavaGanzin) + * Docs: Add backers and sponsors from Open Collective (#422, @piamancini) + * Docs: Add Slackin invite badge (@tootallnate) + +2.6.1 / 2017-02-10 +================== + + * Fix: Module's `export default` syntax fix for IE8 `Expected identifier` error + * Fix: Whitelist DEBUG_FD for values 1 and 2 only (#415, @pi0) + * Fix: IE8 "Expected identifier" error (#414, @vgoma) + * Fix: Namespaces would not disable once enabled (#409, @musikov) + +2.6.0 / 2016-12-28 +================== + + * Fix: added better null pointer checks for browser useColors (@thebigredgeek) + * Improvement: removed explicit `window.debug` export (#404, @tootallnate) + * Improvement: deprecated `DEBUG_FD` environment variable (#405, @tootallnate) + +2.5.2 / 2016-12-25 +================== + + * Fix: reference error on window within webworkers (#393, @KlausTrainer) + * Docs: fixed README typo (#391, @lurch) + * Docs: added notice about v3 api discussion (@thebigredgeek) + +2.5.1 / 2016-12-20 +================== + + * Fix: babel-core compatibility + +2.5.0 / 2016-12-20 +================== + + * Fix: wrong reference in bower file (@thebigredgeek) 
+ * Fix: webworker compatibility (@thebigredgeek) + * Fix: output formatting issue (#388, @kribblo) + * Fix: babel-loader compatibility (#383, @escwald) + * Misc: removed built asset from repo and publications (@thebigredgeek) + * Misc: moved source files to /src (#378, @yamikuronue) + * Test: added karma integration and replaced babel with browserify for browser tests (#378, @yamikuronue) + * Test: coveralls integration (#378, @yamikuronue) + * Docs: simplified language in the opening paragraph (#373, @yamikuronue) + +2.4.5 / 2016-12-17 +================== + + * Fix: `navigator` undefined in Rhino (#376, @jochenberger) + * Fix: custom log function (#379, @hsiliev) + * Improvement: bit of cleanup + linting fixes (@thebigredgeek) + * Improvement: rm non-maintainted `dist/` dir (#375, @freewil) + * Docs: simplified language in the opening paragraph. (#373, @yamikuronue) + +2.4.4 / 2016-12-14 +================== + + * Fix: work around debug being loaded in preload scripts for electron (#368, @paulcbetts) + +2.4.3 / 2016-12-14 +================== + + * Fix: navigation.userAgent error for react native (#364, @escwald) + +2.4.2 / 2016-12-14 +================== + + * Fix: browser colors (#367, @tootallnate) + * Misc: travis ci integration (@thebigredgeek) + * Misc: added linting and testing boilerplate with sanity check (@thebigredgeek) + +2.4.1 / 2016-12-13 +================== + + * Fix: typo that broke the package (#356) + +2.4.0 / 2016-12-13 +================== + + * Fix: bower.json references unbuilt src entry point (#342, @justmatt) + * Fix: revert "handle regex special characters" (@tootallnate) + * Feature: configurable util.inspect()`options for NodeJS (#327, @tootallnate) + * Feature: %O`(big O) pretty-prints objects (#322, @tootallnate) + * Improvement: allow colors in workers (#335, @botverse) + * Improvement: use same color for same namespace. 
(#338, @lchenay) + +2.3.3 / 2016-11-09 +================== + + * Fix: Catch `JSON.stringify()` errors (#195, Jovan Alleyne) + * Fix: Returning `localStorage` saved values (#331, Levi Thomason) + * Improvement: Don't create an empty object when no `process` (Nathan Rajlich) + +2.3.2 / 2016-11-09 +================== + + * Fix: be super-safe in index.js as well (@TooTallNate) + * Fix: should check whether process exists (Tom Newby) + +2.3.1 / 2016-11-09 +================== + + * Fix: Added electron compatibility (#324, @paulcbetts) + * Improvement: Added performance optimizations (@tootallnate) + * Readme: Corrected PowerShell environment variable example (#252, @gimre) + * Misc: Removed yarn lock file from source control (#321, @fengmk2) + +2.3.0 / 2016-11-07 +================== + + * Fix: Consistent placement of ms diff at end of output (#215, @gorangajic) + * Fix: Escaping of regex special characters in namespace strings (#250, @zacronos) + * Fix: Fixed bug causing crash on react-native (#282, @vkarpov15) + * Feature: Enabled ES6+ compatible import via default export (#212 @bucaran) + * Feature: Added %O formatter to reflect Chrome's console.log capability (#279, @oncletom) + * Package: Update "ms" to 0.7.2 (#315, @DevSide) + * Package: removed superfluous version property from bower.json (#207 @kkirsche) + * Readme: fix USE_COLORS to DEBUG_COLORS + * Readme: Doc fixes for format string sugar (#269, @mlucool) + * Readme: Updated docs for DEBUG_FD and DEBUG_COLORS environment variables (#232, @mattlyons0) + * Readme: doc fixes for PowerShell (#271 #243, @exoticknight @unreadable) + * Readme: better docs for browser support (#224, @matthewmueller) + * Tooling: Added yarn integration for development (#317, @thebigredgeek) + * Misc: Renamed History.md to CHANGELOG.md (@thebigredgeek) + * Misc: Added license file (#226 #274, @CantemoInternal @sdaitzman) + * Misc: Updated contributors (@thebigredgeek) + +2.2.0 / 2015-05-09 +================== + + * package: update "ms" 
to v0.7.1 (#202, @dougwilson) + * README: add logging to file example (#193, @DanielOchoa) + * README: fixed a typo (#191, @amir-s) + * browser: expose `storage` (#190, @stephenmathieson) + * Makefile: add a `distclean` target (#189, @stephenmathieson) + +2.1.3 / 2015-03-13 +================== + + * Updated stdout/stderr example (#186) + * Updated example/stdout.js to match debug current behaviour + * Renamed example/stderr.js to stdout.js + * Update Readme.md (#184) + * replace high intensity foreground color for bold (#182, #183) + +2.1.2 / 2015-03-01 +================== + + * dist: recompile + * update "ms" to v0.7.0 + * package: update "browserify" to v9.0.3 + * component: fix "ms.js" repo location + * changed bower package name + * updated documentation about using debug in a browser + * fix: security error on safari (#167, #168, @yields) + +2.1.1 / 2014-12-29 +================== + + * browser: use `typeof` to check for `console` existence + * browser: check for `console.log` truthiness (fix IE 8/9) + * browser: add support for Chrome apps + * Readme: added Windows usage remarks + * Add `bower.json` to properly support bower install + +2.1.0 / 2014-10-15 +================== + + * node: implement `DEBUG_FD` env variable support + * package: update "browserify" to v6.1.0 + * package: add "license" field to package.json (#135, @panuhorsmalahti) + +2.0.0 / 2014-09-01 +================== + + * package: update "browserify" to v5.11.0 + * node: use stderr rather than stdout for logging (#29, @stephenmathieson) + +1.0.4 / 2014-07-15 +================== + + * dist: recompile + * example: remove `console.info()` log usage + * example: add "Content-Type" UTF-8 header to browser example + * browser: place %c marker after the space character + * browser: reset the "content" color via `color: inherit` + * browser: add colors support for Firefox >= v31 + * debug: prefer an instance `log()` function over the global one (#119) + * Readme: update documentation about styled 
console logs for FF v31 (#116, @wryk) + +1.0.3 / 2014-07-09 +================== + + * Add support for multiple wildcards in namespaces (#122, @seegno) + * browser: fix lint + +1.0.2 / 2014-06-10 +================== + + * browser: update color palette (#113, @gscottolson) + * common: make console logging function configurable (#108, @timoxley) + * node: fix %o colors on old node <= 0.8.x + * Makefile: find node path using shell/which (#109, @timoxley) + +1.0.1 / 2014-06-06 +================== + + * browser: use `removeItem()` to clear localStorage + * browser, node: don't set DEBUG if namespaces is undefined (#107, @leedm777) + * package: add "contributors" section + * node: fix comment typo + * README: list authors + +1.0.0 / 2014-06-04 +================== + + * make ms diff be global, not be scope + * debug: ignore empty strings in enable() + * node: make DEBUG_COLORS able to disable coloring + * *: export the `colors` array + * npmignore: don't publish the `dist` dir + * Makefile: refactor to use browserify + * package: add "browserify" as a dev dependency + * Readme: add Web Inspector Colors section + * node: reset terminal color for the debug content + * node: map "%o" to `util.inspect()` + * browser: map "%j" to `JSON.stringify()` + * debug: add custom "formatters" + * debug: use "ms" module for humanizing the diff + * Readme: add "bash" syntax highlighting + * browser: add Firebug color support + * browser: add colors for WebKit browsers + * node: apply log to `console` + * rewrite: abstract common logic for Node & browsers + * add .jshintrc file + +0.8.1 / 2014-04-14 +================== + + * package: re-add the "component" section + +0.8.0 / 2014-03-30 +================== + + * add `enable()` method for nodejs. 
Closes #27 + * change from stderr to stdout + * remove unnecessary index.js file + +0.7.4 / 2013-11-13 +================== + + * remove "browserify" key from package.json (fixes something in browserify) + +0.7.3 / 2013-10-30 +================== + + * fix: catch localStorage security error when cookies are blocked (Chrome) + * add debug(err) support. Closes #46 + * add .browser prop to package.json. Closes #42 + +0.7.2 / 2013-02-06 +================== + + * fix package.json + * fix: Mobile Safari (private mode) is broken with debug + * fix: Use unicode to send escape character to shell instead of octal to work with strict mode javascript + +0.7.1 / 2013-02-05 +================== + + * add repository URL to package.json + * add DEBUG_COLORED to force colored output + * add browserify support + * fix component. Closes #24 + +0.7.0 / 2012-05-04 +================== + + * Added .component to package.json + * Added debug.component.js build + +0.6.0 / 2012-03-16 +================== + + * Added support for "-" prefix in DEBUG [Vinay Pulim] + * Added `.enabled` flag to the node version [TooTallNate] + +0.5.0 / 2012-02-02 +================== + + * Added: humanize diffs. Closes #8 + * Added `debug.disable()` to the CS variant + * Removed padding. Closes #10 + * Fixed: persist client-side variant again. 
Closes #9 + +0.4.0 / 2012-02-01 +================== + + * Added browser variant support for older browsers [TooTallNate] + * Added `debug.enable('project:*')` to browser variant [TooTallNate] + * Added padding to diff (moved it to the right) + +0.3.0 / 2012-01-26 +================== + + * Added millisecond diff when isatty, otherwise UTC string + +0.2.0 / 2012-01-22 +================== + + * Added wildcard support + +0.1.0 / 2011-12-02 +================== + + * Added: remove colors unless stderr isatty [TooTallNate] + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/reverse_engineering/node_modules/debug/LICENSE b/reverse_engineering/node_modules/debug/LICENSE new file mode 100644 index 0000000..658c933 --- /dev/null +++ b/reverse_engineering/node_modules/debug/LICENSE @@ -0,0 +1,19 @@ +(The MIT License) + +Copyright (c) 2014 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/reverse_engineering/node_modules/debug/Makefile b/reverse_engineering/node_modules/debug/Makefile new file mode 100644 index 0000000..584da8b --- /dev/null +++ b/reverse_engineering/node_modules/debug/Makefile @@ -0,0 +1,50 @@ +# get Makefile directory name: http://stackoverflow.com/a/5982798/376773 +THIS_MAKEFILE_PATH:=$(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)) +THIS_DIR:=$(shell cd $(dir $(THIS_MAKEFILE_PATH));pwd) + +# BIN directory +BIN := $(THIS_DIR)/node_modules/.bin + +# Path +PATH := node_modules/.bin:$(PATH) +SHELL := /bin/bash + +# applications +NODE ?= $(shell which node) +YARN ?= $(shell which yarn) +PKG ?= $(if $(YARN),$(YARN),$(NODE) $(shell which npm)) +BROWSERIFY ?= $(NODE) $(BIN)/browserify + +.FORCE: + +install: node_modules + +node_modules: package.json + @NODE_ENV= $(PKG) install + @touch node_modules + +lint: .FORCE + eslint browser.js debug.js index.js node.js + +test-node: .FORCE + istanbul cover node_modules/mocha/bin/_mocha -- test/**.js + +test-browser: .FORCE + mkdir -p dist + + @$(BROWSERIFY) \ + --standalone debug \ + . 
> dist/debug.js + + karma start --single-run + rimraf dist + +test: .FORCE + concurrently \ + "make test-node" \ + "make test-browser" + +coveralls: + cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js + +.PHONY: all install clean distclean diff --git a/reverse_engineering/node_modules/debug/README.md b/reverse_engineering/node_modules/debug/README.md new file mode 100644 index 0000000..f67be6b --- /dev/null +++ b/reverse_engineering/node_modules/debug/README.md @@ -0,0 +1,312 @@ +# debug +[![Build Status](https://travis-ci.org/visionmedia/debug.svg?branch=master)](https://travis-ci.org/visionmedia/debug) [![Coverage Status](https://coveralls.io/repos/github/visionmedia/debug/badge.svg?branch=master)](https://coveralls.io/github/visionmedia/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny node.js debugging utility modelled after node core's debugging technique. + +**Discussion around the V3 API is under way [here](https://github.com/visionmedia/debug/issues/370)** + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. 
+ +Example _app.js_: + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %s', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example _worker.js_: + +```js +var debug = require('debug')('worker'); + +setInterval(function(){ + debug('doing some work'); +}, 1000); +``` + + The __DEBUG__ environment variable is then used to enable these based on space or comma-delimited names. Here are some examples: + + ![debug http and worker](http://f.cl.ly/items/18471z1H402O24072r1J/Screenshot.png) + + ![debug worker](http://f.cl.ly/items/1X413v1a3M0d3C2c1E0i/Screenshot.png) + +#### Windows note + + On Windows the environment variable is set using the `set` command. + + ```cmd + set DEBUG=*,-not_this + ``` + + Note that PowerShell uses different syntax to set environment variables. + + ```cmd + $env:DEBUG = "*,-not_this" + ``` + +Then, run the program to be debugged as usual. + +## Millisecond diff + + When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + ![](http://f.cl.ly/items/2i3h1d3t121M2Z1A3Q0N/Screenshot.png) + + When stdout is not a TTY, `Date#toUTCString()` is used, making it more useful for logging the debug information as shown below: + + ![](http://f.cl.ly/items/112H3i0e0o0P0a2Q2r11/Screenshot.png) + +## Conventions + + If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. 
If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". + +## Wildcards + + The `*` character may be used as a wildcard. Suppose for example your library has debuggers named "connect:bodyParser", "connect:compress", "connect:session", instead of listing all three with `DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do `DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + + You can also exclude specific debuggers by prefixing them with a "-" character. For example, `DEBUG=*,-connect:*` would include all debuggers except those starting with "connect:". + +## Environment Variables + + When running through Node.js, you can set a few environment variables that will + change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + + __Note:__ The environment variables beginning with `DEBUG_` end up being + converted into an Options object that gets used with `%o`/`%O` formatters. + See the Node.js documentation for + [`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) + for the complete list. + +## Formatters + + + Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. 
Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + +### Custom formatters + + You can add custom formatters by extending the `debug.formatters` object. For example, if you wanted to add support for rendering a Buffer as hex with `%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + +## Browser support + You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), + or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), + if you don't want to build it yourself. + + Debug's enable state is currently persisted by `localStorage`. + Consider the situation shown below where you have `worker:a` and `worker:b`, + and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +#### Web Inspector Colors + + Colors are also enabled on "Web Inspectors" that understand the `%c` formatting + option. These are WebKit web inspectors, Firefox ([since version + 31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) + and the Firebug plugin for Firefox (any version). 
+ + Colored output looks something like: + + ![](https://cloud.githubusercontent.com/assets/71256/3139768/b98c5fd8-e8ef-11e3-862a-f7253b6f47c6.png) + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example _stdout.js_: + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + +## Backers + +Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. 
[[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2016 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/reverse_engineering/node_modules/debug/component.json b/reverse_engineering/node_modules/debug/component.json new file mode 100644 index 0000000..9de2641 --- /dev/null +++ b/reverse_engineering/node_modules/debug/component.json @@ -0,0 +1,19 @@ +{ + "name": "debug", + "repo": "visionmedia/debug", + "description": "small debugging utility", + "version": "2.6.9", + "keywords": [ + "debug", + "log", + "debugger" + ], + "main": "src/browser.js", + "scripts": [ + "src/browser.js", + "src/debug.js" + ], + "dependencies": { + "rauchg/ms.js": "0.7.1" + } +} diff --git a/reverse_engineering/node_modules/debug/karma.conf.js b/reverse_engineering/node_modules/debug/karma.conf.js new file mode 100644 index 0000000..103a82d --- /dev/null +++ b/reverse_engineering/node_modules/debug/karma.conf.js @@ -0,0 +1,70 @@ +// Karma configuration +// Generated on Fri Dec 16 2016 13:09:51 GMT+0000 (UTC) + +module.exports = function(config) { + config.set({ + + // base path that will be used to resolve all patterns (eg. 
files, exclude) + basePath: '', + + + // frameworks to use + // available frameworks: https://npmjs.org/browse/keyword/karma-adapter + frameworks: ['mocha', 'chai', 'sinon'], + + + // list of files / patterns to load in the browser + files: [ + 'dist/debug.js', + 'test/*spec.js' + ], + + + // list of files to exclude + exclude: [ + 'src/node.js' + ], + + + // preprocess matching files before serving them to the browser + // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor + preprocessors: { + }, + + // test results reporter to use + // possible values: 'dots', 'progress' + // available reporters: https://npmjs.org/browse/keyword/karma-reporter + reporters: ['progress'], + + + // web server port + port: 9876, + + + // enable / disable colors in the output (reporters and logs) + colors: true, + + + // level of logging + // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG + logLevel: config.LOG_INFO, + + + // enable / disable watching file and executing tests whenever any file changes + autoWatch: true, + + + // start these browsers + // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher + browsers: ['PhantomJS'], + + + // Continuous Integration mode + // if true, Karma captures browsers, runs the tests and exits + singleRun: false, + + // Concurrency level + // how many browser should be started simultaneous + concurrency: Infinity + }) +} diff --git a/reverse_engineering/node_modules/debug/node.js b/reverse_engineering/node_modules/debug/node.js new file mode 100644 index 0000000..7fc36fe --- /dev/null +++ b/reverse_engineering/node_modules/debug/node.js @@ -0,0 +1 @@ +module.exports = require('./src/node'); diff --git a/reverse_engineering/node_modules/debug/package.json b/reverse_engineering/node_modules/debug/package.json new file mode 100644 index 0000000..d207019 --- /dev/null +++ b/reverse_engineering/node_modules/debug/package.json @@ -0,0 
+1,91 @@ +{ + "_args": [ + [ + "debug@2.6.9", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "debug@2.6.9", + "_id": "debug@2.6.9", + "_inBundle": false, + "_integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "_location": "/debug", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "debug@2.6.9", + "name": "debug", + "escapedName": "debug", + "rawSpec": "2.6.9", + "saveSpec": null, + "fetchSpec": "2.6.9" + }, + "_requiredBy": [ + "/tunnel-ssh" + ], + "_resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "_spec": "2.6.9", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca" + }, + "browser": "./src/browser.js", + "bugs": { + "url": "https://github.com/visionmedia/debug/issues" + }, + "component": { + "scripts": { + "debug/index.js": "browser.js", + "debug/debug.js": "debug.js" + } + }, + "contributors": [ + { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://n8.io" + }, + { + "name": "Andrew Rhyne", + "email": "rhyneandrew@gmail.com" + } + ], + "dependencies": { + "ms": "2.0.0" + }, + "description": "small debugging utility", + "devDependencies": { + "browserify": "9.0.3", + "chai": "^3.5.0", + "concurrently": "^3.1.0", + "coveralls": "^2.11.15", + "eslint": "^3.12.1", + "istanbul": "^0.4.5", + "karma": "^1.3.0", + "karma-chai": "^0.1.0", + "karma-mocha": "^1.3.0", + "karma-phantomjs-launcher": "^1.0.2", + "karma-sinon": "^1.0.5", + "mocha": "^3.2.0", + "mocha-lcov-reporter": "^1.2.0", + "rimraf": "^2.5.4", + "sinon": "^1.17.6", + "sinon-chai": "^2.8.0" + }, + "homepage": "https://github.com/visionmedia/debug#readme", + "keywords": [ + "debug", + "log", + "debugger" + ], + "license": "MIT", + "main": "./src/index.js", + "name": "debug", + "repository": { + "type": "git", + 
"url": "git://github.com/visionmedia/debug.git" + }, + "version": "2.6.9" +} diff --git a/reverse_engineering/node_modules/debug/src/browser.js b/reverse_engineering/node_modules/debug/src/browser.js new file mode 100644 index 0000000..7106924 --- /dev/null +++ b/reverse_engineering/node_modules/debug/src/browser.js @@ -0,0 +1,185 @@ +/** + * This is the web browser implementation of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = require('./debug'); +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = 'undefined' != typeof chrome + && 'undefined' != typeof chrome.storage + ? chrome.storage.local + : localstorage(); + +/** + * Colors. + */ + +exports.colors = [ + 'lightseagreen', + 'forestgreen', + 'goldenrod', + 'dodgerblue', + 'darkorchid', + 'crimson' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && window.process.type === 'renderer') { + return true; + } + + // is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // is firebug? 
http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +exports.formatters.j = function(v) { + try { + return JSON.stringify(v); + } catch (err) { + return '[UnexpectedJSONParseError]: ' + err.message; + } +}; + + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + var useColors = this.useColors; + + args[0] = (useColors ? '%c' : '') + + this.namespace + + (useColors ? ' %c' : ' ') + + args[0] + + (useColors ? '%c ' : ' ') + + '+' + exports.humanize(this.diff); + + if (!useColors) return; + + var c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit') + + // the final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + var index = 0; + var lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, function(match) { + if ('%%' === match) return; + index++; + if ('%c' === match) { + // we only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.log()` when available. + * No-op when `console.log` is not a "function". 
+ * + * @api public + */ + +function log() { + // this hackery is required for IE8/9, where + // the `console.log` function doesn't have 'apply' + return 'object' === typeof console + && console.log + && Function.prototype.apply.call(console.log, console, arguments); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + +function save(namespaces) { + try { + if (null == namespaces) { + exports.storage.removeItem('debug'); + } else { + exports.storage.debug = namespaces; + } + } catch(e) {} +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + var r; + try { + r = exports.storage.debug; + } catch(e) {} + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Enable namespaces listed in `localStorage.debug` initially. + */ + +exports.enable(load()); + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + return window.localStorage; + } catch (e) {} +} diff --git a/reverse_engineering/node_modules/debug/src/debug.js b/reverse_engineering/node_modules/debug/src/debug.js new file mode 100644 index 0000000..6a5e3fc --- /dev/null +++ b/reverse_engineering/node_modules/debug/src/debug.js @@ -0,0 +1,202 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + * + * Expose `debug()` as the module. 
+ */ + +exports = module.exports = createDebug.debug = createDebug['default'] = createDebug; +exports.coerce = coerce; +exports.disable = disable; +exports.enable = enable; +exports.enabled = enabled; +exports.humanize = require('ms'); + +/** + * The currently active debug mode names, and names to skip. + */ + +exports.names = []; +exports.skips = []; + +/** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + +exports.formatters = {}; + +/** + * Previous log timestamp. + */ + +var prevTime; + +/** + * Select a color. + * @param {String} namespace + * @return {Number} + * @api private + */ + +function selectColor(namespace) { + var hash = 0, i; + + for (i in namespace) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return exports.colors[Math.abs(hash) % exports.colors.length]; +} + +/** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + +function createDebug(namespace) { + + function debug() { + // disabled? 
+ if (!debug.enabled) return; + + var self = debug; + + // set `diff` timestamp + var curr = +new Date(); + var ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + // turn the `arguments` into a proper Array + var args = new Array(arguments.length); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i]; + } + + args[0] = exports.coerce(args[0]); + + if ('string' !== typeof args[0]) { + // anything else let's inspect with %O + args.unshift('%O'); + } + + // apply any `formatters` transformations + var index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, function(match, format) { + // if we encounter an escaped % then don't increase the array index + if (match === '%%') return match; + index++; + var formatter = exports.formatters[format]; + if ('function' === typeof formatter) { + var val = args[index]; + match = formatter.call(self, val); + + // now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // apply env-specific formatting (colors, etc.) + exports.formatArgs.call(self, args); + + var logFn = debug.log || exports.log || console.log.bind(console); + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.enabled = exports.enabled(namespace); + debug.useColors = exports.useColors(); + debug.color = selectColor(namespace); + + // env-specific initialization logic for debug instances + if ('function' === typeof exports.init) { + exports.init(debug); + } + + return debug; +} + +/** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + +function enable(namespaces) { + exports.save(namespaces); + + exports.names = []; + exports.skips = []; + + var split = (typeof namespaces === 'string' ? 
namespaces : '').split(/[\s,]+/); + var len = split.length; + + for (var i = 0; i < len; i++) { + if (!split[i]) continue; // ignore empty strings + namespaces = split[i].replace(/\*/g, '.*?'); + if (namespaces[0] === '-') { + exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); + } else { + exports.names.push(new RegExp('^' + namespaces + '$')); + } + } +} + +/** + * Disable debug output. + * + * @api public + */ + +function disable() { + exports.enable(''); +} + +/** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + +function enabled(name) { + var i, len; + for (i = 0, len = exports.skips.length; i < len; i++) { + if (exports.skips[i].test(name)) { + return false; + } + } + for (i = 0, len = exports.names.length; i < len; i++) { + if (exports.names[i].test(name)) { + return true; + } + } + return false; +} + +/** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + +function coerce(val) { + if (val instanceof Error) return val.stack || val.message; + return val; +} diff --git a/reverse_engineering/node_modules/debug/src/index.js b/reverse_engineering/node_modules/debug/src/index.js new file mode 100644 index 0000000..e12cf4d --- /dev/null +++ b/reverse_engineering/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer process, which is node, but we should + * treat as a browser. 
+ */ + +if (typeof process !== 'undefined' && process.type === 'renderer') { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/reverse_engineering/node_modules/debug/src/inspector-log.js b/reverse_engineering/node_modules/debug/src/inspector-log.js new file mode 100644 index 0000000..60ea6c0 --- /dev/null +++ b/reverse_engineering/node_modules/debug/src/inspector-log.js @@ -0,0 +1,15 @@ +module.exports = inspectorLog; + +// black hole +const nullStream = new (require('stream').Writable)(); +nullStream._write = () => {}; + +/** + * Outputs a `console.log()` to the Node.js Inspector console *only*. + */ +function inspectorLog() { + const stdout = console._stdout; + console._stdout = nullStream; + console.log.apply(console, arguments); + console._stdout = stdout; +} diff --git a/reverse_engineering/node_modules/debug/src/node.js b/reverse_engineering/node_modules/debug/src/node.js new file mode 100644 index 0000000..b15109c --- /dev/null +++ b/reverse_engineering/node_modules/debug/src/node.js @@ -0,0 +1,248 @@ +/** + * Module dependencies. + */ + +var tty = require('tty'); +var util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + * + * Expose `debug()` as the module. + */ + +exports = module.exports = require('./debug'); +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; + +/** + * Colors. + */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +/** + * Build up the default `inspectOpts` object from the environment variables. 
+ * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(function (key) { + return /^debug_/i.test(key); +}).reduce(function (obj, key) { + // camel-case + var prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() }); + + // coerce string value into JS value + var val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) val = true; + else if (/^(no|off|false|disabled)$/i.test(val)) val = false; + else if (val === 'null') val = null; + else val = Number(val); + + obj[prop] = val; + return obj; +}, {}); + +/** + * The file descriptor to write the `debug()` calls to. + * Set the `DEBUG_FD` env variable to override with another value. i.e.: + * + * $ DEBUG_FD=3 node script.js 3>debug.log + */ + +var fd = parseInt(process.env.DEBUG_FD, 10) || 2; + +if (1 !== fd && 2 !== fd) { + util.deprecate(function(){}, 'except for stderr(2) and stdout(1), any other usage of DEBUG_FD is deprecated. Override debug.log if you want to use a different log function (https://git.io/debug_fd)')() +} + +var stream = 1 === fd ? process.stdout : + 2 === fd ? process.stderr : + createWritableStdioStream(fd); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts + ? Boolean(exports.inspectOpts.colors) + : tty.isatty(fd); +} + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +exports.formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n').map(function(str) { + return str.trim() + }).join(' '); +}; + +/** + * Map %o to `util.inspect()`, allowing multiple lines if needed. + */ + +exports.formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; + +/** + * Adds ANSI color escape codes if enabled. 
+ * + * @api public + */ + +function formatArgs(args) { + var name = this.namespace; + var useColors = this.useColors; + + if (useColors) { + var c = this.color; + var prefix = ' \u001b[3' + c + ';1m' + name + ' ' + '\u001b[0m'; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push('\u001b[3' + c + 'm+' + exports.humanize(this.diff) + '\u001b[0m'); + } else { + args[0] = new Date().toUTCString() + + ' ' + name + ' ' + args[0]; + } +} + +/** + * Invokes `util.format()` with the specified arguments and writes to `stream`. + */ + +function log() { + return stream.write(util.format.apply(util, arguments) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ + +function save(namespaces) { + if (null == namespaces) { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } else { + process.env.DEBUG = namespaces; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Copied from `node/src/node.js`. + * + * XXX: It's lame that node doesn't expose this API out-of-the-box. It also + * relies on the undocumented `tty_wrap.guessHandleType()` which is also lame. + */ + +function createWritableStdioStream (fd) { + var stream; + var tty_wrap = process.binding('tty_wrap'); + + // Note stream._type is used for test-module-load-list.js + + switch (tty_wrap.guessHandleType(fd)) { + case 'TTY': + stream = new tty.WriteStream(fd); + stream._type = 'tty'; + + // Hack to have stream not keep the event loop alive. 
+ // See https://github.com/joyent/node/issues/1726 + if (stream._handle && stream._handle.unref) { + stream._handle.unref(); + } + break; + + case 'FILE': + var fs = require('fs'); + stream = new fs.SyncWriteStream(fd, { autoClose: false }); + stream._type = 'fs'; + break; + + case 'PIPE': + case 'TCP': + var net = require('net'); + stream = new net.Socket({ + fd: fd, + readable: false, + writable: true + }); + + // FIXME Should probably have an option in net.Socket to create a + // stream from an existing fd which is writable only. But for now + // we'll just add this hack and set the `readable` member to false. + // Test: ./node test/fixtures/echo.js < /etc/passwd + stream.readable = false; + stream.read = null; + stream._type = 'pipe'; + + // FIXME Hack to have stream not keep the event loop alive. + // See https://github.com/joyent/node/issues/1726 + if (stream._handle && stream._handle.unref) { + stream._handle.unref(); + } + break; + + default: + // Probably an error on in uv_guess_handle() + throw new Error('Implement me. Unknown stream file type!'); + } + + // For supporting legacy API we put the FD here. + stream.fd = fd; + + stream._isStdio = true; + + return stream; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init (debug) { + debug.inspectOpts = {}; + + var keys = Object.keys(exports.inspectOpts); + for (var i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +/** + * Enable namespaces listed in `process.env.DEBUG` initially. + */ + +exports.enable(load()); diff --git a/reverse_engineering/node_modules/inherits/LICENSE b/reverse_engineering/node_modules/inherits/LICENSE new file mode 100644 index 0000000..dea3013 --- /dev/null +++ b/reverse_engineering/node_modules/inherits/LICENSE @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + diff --git a/reverse_engineering/node_modules/inherits/README.md b/reverse_engineering/node_modules/inherits/README.md new file mode 100644 index 0000000..b1c5665 --- /dev/null +++ b/reverse_engineering/node_modules/inherits/README.md @@ -0,0 +1,42 @@ +Browser-friendly inheritance fully compatible with standard node.js +[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). + +This package exports standard `inherits` from node.js `util` module in +node environment, but also provides alternative browser-friendly +implementation through [browser +field](https://gist.github.com/shtylman/4339901). Alternative +implementation is a literal copy of standard one located in standalone +module to avoid requiring of `util`. It also has a shim for old +browsers with no `Object.create` support. + +While keeping you sure you are using standard `inherits` +implementation in node.js environment, it allows bundlers such as +[browserify](https://github.com/substack/node-browserify) to not +include full `util` package to your client code if all you need is +just `inherits` function. It worth, because browser shim for `util` +package is large and `inherits` is often the single function you need +from it. 
+ +It's recommended to use this package instead of +`require('util').inherits` for any code that has chances to be used +not only in node.js but in browser too. + +## usage + +```js +var inherits = require('inherits'); +// then use exactly as the standard one +``` + +## note on version ~1.0 + +Version ~1.0 had completely different motivation and is not compatible +neither with 2.0 nor with standard node.js `inherits`. + +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/reverse_engineering/node_modules/inherits/inherits.js b/reverse_engineering/node_modules/inherits/inherits.js new file mode 100644 index 0000000..f71f2d9 --- /dev/null +++ b/reverse_engineering/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/reverse_engineering/node_modules/inherits/inherits_browser.js b/reverse_engineering/node_modules/inherits/inherits_browser.js new file mode 100644 index 0000000..86bbb3d --- /dev/null +++ b/reverse_engineering/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = 
function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/reverse_engineering/node_modules/inherits/package.json b/reverse_engineering/node_modules/inherits/package.json new file mode 100644 index 0000000..cdc11a1 --- /dev/null +++ b/reverse_engineering/node_modules/inherits/package.json @@ -0,0 +1,61 @@ +{ + "_from": "inherits@^2.0.3", + "_id": "inherits@2.0.4", + "_inBundle": false, + "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "_location": "/inherits", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "inherits@^2.0.3", + "name": "inherits", + "escapedName": "inherits", + "rawSpec": "^2.0.3", + "saveSpec": null, + "fetchSpec": "^2.0.3" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", + "_spec": "inherits@^2.0.3", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/readable-stream", + "browser": "./inherits_browser.js", + "bugs": { + "url": "https://github.com/isaacs/inherits/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ], + "homepage": "https://github.com/isaacs/inherits#readme", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "license": "ISC", + "main": "./inherits.js", + "name": "inherits", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/inherits.git" + }, + "scripts": { + "test": "tap" + }, + "version": "2.0.4" +} diff --git 
a/reverse_engineering/node_modules/lodash.defaults/LICENSE b/reverse_engineering/node_modules/lodash.defaults/LICENSE new file mode 100644 index 0000000..e0c69d5 --- /dev/null +++ b/reverse_engineering/node_modules/lodash.defaults/LICENSE @@ -0,0 +1,47 @@ +Copyright jQuery Foundation and other contributors + +Based on Underscore.js, copyright Jeremy Ashkenas, +DocumentCloud and Investigative Reporters & Editors + +This software consists of voluntary contributions made by many +individuals. For exact contribution history, see the revision history +available at https://github.com/lodash/lodash + +The following license applies to all parts of this software except as +documented below: + +==== + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +==== + +Copyright and related rights for sample code are waived via CC0. Sample +code is defined as all source code displayed within the prose of the +documentation. 
+ +CC0: http://creativecommons.org/publicdomain/zero/1.0/ + +==== + +Files located in the node_modules and vendor directories are externally +maintained libraries used by this software which have their own +licenses; we recommend you read them, as their terms may differ from the +terms above. diff --git a/reverse_engineering/node_modules/lodash.defaults/README.md b/reverse_engineering/node_modules/lodash.defaults/README.md new file mode 100644 index 0000000..a129849 --- /dev/null +++ b/reverse_engineering/node_modules/lodash.defaults/README.md @@ -0,0 +1,18 @@ +# lodash.defaults v4.2.0 + +The [lodash](https://lodash.com/) method `_.defaults` exported as a [Node.js](https://nodejs.org/) module. + +## Installation + +Using npm: +```bash +$ {sudo -H} npm i -g npm +$ npm i --save lodash.defaults +``` + +In Node.js: +```js +var defaults = require('lodash.defaults'); +``` + +See the [documentation](https://lodash.com/docs#defaults) or [package source](https://github.com/lodash/lodash/blob/4.2.0-npm-packages/lodash.defaults) for more details. diff --git a/reverse_engineering/node_modules/lodash.defaults/index.js b/reverse_engineering/node_modules/lodash.defaults/index.js new file mode 100644 index 0000000..25eba9c --- /dev/null +++ b/reverse_engineering/node_modules/lodash.defaults/index.js @@ -0,0 +1,668 @@ +/** + * lodash (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright jQuery Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ + +/** Used as references for various `Number` constants. */ +var MAX_SAFE_INTEGER = 9007199254740991; + +/** `Object#toString` result references. */ +var argsTag = '[object Arguments]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]'; + +/** Used to detect unsigned integer values. 
*/ +var reIsUint = /^(?:0|[1-9]\d*)$/; + +/** + * A faster alternative to `Function#apply`, this function invokes `func` + * with the `this` binding of `thisArg` and the arguments of `args`. + * + * @private + * @param {Function} func The function to invoke. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} args The arguments to invoke `func` with. + * @returns {*} Returns the result of `func`. + */ +function apply(func, thisArg, args) { + switch (args.length) { + case 0: return func.call(thisArg); + case 1: return func.call(thisArg, args[0]); + case 2: return func.call(thisArg, args[0], args[1]); + case 3: return func.call(thisArg, args[0], args[1], args[2]); + } + return func.apply(thisArg, args); +} + +/** + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. + * + * @private + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the array of results. + */ +function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); + + while (++index < n) { + result[index] = iteratee(index); + } + return result; +} + +/** Used for built-in method references. */ +var objectProto = Object.prototype; + +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; + +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; + +/** Built-in value references. */ +var propertyIsEnumerable = objectProto.propertyIsEnumerable; + +/* Built-in method references for those with the same name as other `lodash` methods. */ +var nativeMax = Math.max; + +/** + * Creates an array of the enumerable property names of the array-like `value`. + * + * @private + * @param {*} value The value to query. 
+ * @param {boolean} inherited Specify returning inherited property names. + * @returns {Array} Returns the array of property names. + */ +function arrayLikeKeys(value, inherited) { + // Safari 8.1 makes `arguments.callee` enumerable in strict mode. + // Safari 9 makes `arguments.length` enumerable in strict mode. + var result = (isArray(value) || isArguments(value)) + ? baseTimes(value.length, String) + : []; + + var length = result.length, + skipIndexes = !!length; + + for (var key in value) { + if ((inherited || hasOwnProperty.call(value, key)) && + !(skipIndexes && (key == 'length' || isIndex(key, length)))) { + result.push(key); + } + } + return result; +} + +/** + * Used by `_.defaults` to customize its `_.assignIn` use. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to assign. + * @param {Object} object The parent object of `objValue`. + * @returns {*} Returns the value to assign. + */ +function assignInDefaults(objValue, srcValue, key, object) { + if (objValue === undefined || + (eq(objValue, objectProto[key]) && !hasOwnProperty.call(object, key))) { + return srcValue; + } + return objValue; +} + +/** + * Assigns `value` to `key` of `object` if the existing value is not equivalent + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ +function assignValue(object, key, value) { + var objValue = object[key]; + if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || + (value === undefined && !(key in object))) { + object[key] = value; + } +} + +/** + * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. 
+ * @returns {Array} Returns the array of property names. + */ +function baseKeysIn(object) { + if (!isObject(object)) { + return nativeKeysIn(object); + } + var isProto = isPrototype(object), + result = []; + + for (var key in object) { + if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { + result.push(key); + } + } + return result; +} + +/** + * The base implementation of `_.rest` which doesn't validate or coerce arguments. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + */ +function baseRest(func, start) { + start = nativeMax(start === undefined ? (func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); + + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = array; + return apply(func, this, otherArgs); + }; +} + +/** + * Copies properties of `source` to `object`. + * + * @private + * @param {Object} source The object to copy properties from. + * @param {Array} props The property identifiers to copy. + * @param {Object} [object={}] The object to copy properties to. + * @param {Function} [customizer] The function to customize copied values. + * @returns {Object} Returns `object`. + */ +function copyObject(source, props, object, customizer) { + object || (object = {}); + + var index = -1, + length = props.length; + + while (++index < length) { + var key = props[index]; + + var newValue = customizer + ? customizer(object[key], source[key], key, object, source) + : undefined; + + assignValue(object, key, newValue === undefined ? 
source[key] : newValue); + } + return object; +} + +/** + * Creates a function like `_.assign`. + * + * @private + * @param {Function} assigner The function to assign values. + * @returns {Function} Returns the new assigner function. + */ +function createAssigner(assigner) { + return baseRest(function(object, sources) { + var index = -1, + length = sources.length, + customizer = length > 1 ? sources[length - 1] : undefined, + guard = length > 2 ? sources[2] : undefined; + + customizer = (assigner.length > 3 && typeof customizer == 'function') + ? (length--, customizer) + : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + customizer = length < 3 ? undefined : customizer; + length = 1; + } + object = Object(object); + while (++index < length) { + var source = sources[index]; + if (source) { + assigner(object, source, index, customizer); + } + } + return object; + }); +} + +/** + * Checks if `value` is a valid array-like index. + * + * @private + * @param {*} value The value to check. + * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index. + * @returns {boolean} Returns `true` if `value` is a valid index, else `false`. + */ +function isIndex(value, length) { + length = length == null ? MAX_SAFE_INTEGER : length; + return !!length && + (typeof value == 'number' || reIsUint.test(value)) && + (value > -1 && value % 1 == 0 && value < length); +} + +/** + * Checks if the given arguments are from an iteratee call. + * + * @private + * @param {*} value The potential iteratee value argument. + * @param {*} index The potential iteratee index or key argument. + * @param {*} object The potential iteratee object argument. + * @returns {boolean} Returns `true` if the arguments are from an iteratee call, + * else `false`. + */ +function isIterateeCall(value, index, object) { + if (!isObject(object)) { + return false; + } + var type = typeof index; + if (type == 'number' + ? 
(isArrayLike(object) && isIndex(index, object.length)) + : (type == 'string' && index in object) + ) { + return eq(object[index], value); + } + return false; +} + +/** + * Checks if `value` is likely a prototype object. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a prototype, else `false`. + */ +function isPrototype(value) { + var Ctor = value && value.constructor, + proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto; + + return value === proto; +} + +/** + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ +function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } + } + return result; +} + +/** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ +function eq(value, other) { + return value === other || (value !== value && other !== other); +} + +/** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. 
+ * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ +function isArguments(value) { + // Safari 8.1 makes `arguments.callee` enumerable in strict mode. + return isArrayLikeObject(value) && hasOwnProperty.call(value, 'callee') && + (!propertyIsEnumerable.call(value, 'callee') || objectToString.call(value) == argsTag); +} + +/** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ +var isArray = Array.isArray; + +/** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. + * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ +function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); +} + +/** + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. 
+ * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example + * + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false + */ +function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); +} + +/** + * Checks if `value` is classified as a `Function` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. + * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ +function isFunction(value) { + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 8-9 which returns 'object' for typed array and other constructors. + var tag = isObject(value) ? objectToString.call(value) : ''; + return tag == funcTag || tag == genTag; +} + +/** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. + * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ +function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; +} + +/** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. 
arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ +function isObject(value) { + var type = typeof value; + return !!value && (type == 'object' || type == 'function'); +} + +/** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ +function isObjectLike(value) { + return !!value && typeof value == 'object'; +} + +/** + * This method is like `_.assignIn` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extendWith + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? 
srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignInWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ +var assignInWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keysIn(source), object, customizer); +}); + +/** + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. + * + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ +var defaults = baseRest(function(args) { + args.push(undefined, assignInDefaults); + return apply(assignInWith, undefined, args); +}); + +/** + * Creates an array of the own and inherited enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + */ +function keysIn(object) { + return isArrayLike(object) ? 
arrayLikeKeys(object, true) : baseKeysIn(object); +} + +module.exports = defaults; diff --git a/reverse_engineering/node_modules/lodash.defaults/package.json b/reverse_engineering/node_modules/lodash.defaults/package.json new file mode 100644 index 0000000..0df212e --- /dev/null +++ b/reverse_engineering/node_modules/lodash.defaults/package.json @@ -0,0 +1,72 @@ +{ + "_args": [ + [ + "lodash.defaults@4.2.0", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "lodash.defaults@4.2.0", + "_id": "lodash.defaults@4.2.0", + "_inBundle": false, + "_integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=", + "_location": "/lodash.defaults", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "lodash.defaults@4.2.0", + "name": "lodash.defaults", + "escapedName": "lodash.defaults", + "rawSpec": "4.2.0", + "saveSpec": null, + "fetchSpec": "4.2.0" + }, + "_requiredBy": [ + "/tunnel-ssh" + ], + "_resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "_spec": "4.2.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "John-David Dalton", + "email": "john.david.dalton@gmail.com", + "url": "http://allyoucanleet.com/" + }, + "bugs": { + "url": "https://github.com/lodash/lodash/issues" + }, + "contributors": [ + { + "name": "John-David Dalton", + "email": "john.david.dalton@gmail.com", + "url": "http://allyoucanleet.com/" + }, + { + "name": "Blaine Bublitz", + "email": "blaine.bublitz@gmail.com", + "url": "https://github.com/phated" + }, + { + "name": "Mathias Bynens", + "email": "mathias@qiwi.be", + "url": "https://mathiasbynens.be/" + } + ], + "description": "The lodash method `_.defaults` exported as a module.", + "homepage": "https://lodash.com/", + "icon": "https://lodash.com/icon.svg", + "keywords": [ + "lodash-modularized", + "defaults" + ], + "license": "MIT", + "name": "lodash.defaults", + "repository": { + "type": 
"git", + "url": "git+https://github.com/lodash/lodash.git" + }, + "scripts": { + "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" + }, + "version": "4.2.0" +} diff --git a/reverse_engineering/node_modules/ms/index.js b/reverse_engineering/node_modules/ms/index.js new file mode 100644 index 0000000..6a522b1 --- /dev/null +++ b/reverse_engineering/node_modules/ms/index.js @@ -0,0 +1,152 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isNaN(val) === false) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + if (ms >= d) { + return Math.round(ms / d) + 'd'; + } + if (ms >= h) { + return Math.round(ms / h) + 'h'; + } + if (ms >= m) { + return Math.round(ms / m) + 'm'; + } + if (ms >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + return plural(ms, d, 'day') || + plural(ms, h, 'hour') || + plural(ms, m, 'minute') || + plural(ms, s, 'second') || + ms + ' ms'; +} + +/** + * Pluralization helper. 
+ */ + +function plural(ms, n, name) { + if (ms < n) { + return; + } + if (ms < n * 1.5) { + return Math.floor(ms / n) + ' ' + name; + } + return Math.ceil(ms / n) + ' ' + name + 's'; +} diff --git a/reverse_engineering/node_modules/ms/license.md b/reverse_engineering/node_modules/ms/license.md new file mode 100644 index 0000000..69b6125 --- /dev/null +++ b/reverse_engineering/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Zeit, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/reverse_engineering/node_modules/ms/package.json b/reverse_engineering/node_modules/ms/package.json new file mode 100644 index 0000000..cead0ad --- /dev/null +++ b/reverse_engineering/node_modules/ms/package.json @@ -0,0 +1,72 @@ +{ + "_args": [ + [ + "ms@2.0.0", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "ms@2.0.0", + "_id": "ms@2.0.0", + "_inBundle": false, + "_integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "_location": "/ms", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "ms@2.0.0", + "name": "ms", + "escapedName": "ms", + "rawSpec": "2.0.0", + "saveSpec": null, + "fetchSpec": "2.0.0" + }, + "_requiredBy": [ + "/debug" + ], + "_resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "_spec": "2.0.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "bugs": { + "url": "https://github.com/zeit/ms/issues" + }, + "description": "Tiny milisecond conversion utility", + "devDependencies": { + "eslint": "3.19.0", + "expect.js": "0.3.1", + "husky": "0.13.3", + "lint-staged": "3.4.1", + "mocha": "3.4.1" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/zeit/ms#readme", + "license": "MIT", + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "main": "./index", + "name": "ms", + "repository": { + "type": "git", + "url": "git+https://github.com/zeit/ms.git" + }, + "scripts": { + "lint": "eslint lib/* bin/*", + "precommit": "lint-staged", + "test": "mocha tests.js" + }, + "version": "2.0.0" +} diff --git a/reverse_engineering/node_modules/ms/readme.md b/reverse_engineering/node_modules/ms/readme.md new file mode 100644 index 0000000..84a9974 --- /dev/null +++ b/reverse_engineering/node_modules/ms/readme.md @@ -0,0 +1,51 @@ +# ms + +[![Build 
Status](https://travis-ci.org/zeit/ms.svg?branch=master)](https://travis-ci.org/zeit/ms) +[![Slack Channel](http://zeit-slackin.now.sh/badge.svg)](https://zeit.chat/) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +``` + +### Convert from milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(ms('10 hours')) // "10h" +``` + +### Time format written-out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [node](https://nodejs.org) and in the browser. +- If a number is supplied to `ms`, a string with a unit is returned. +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`). +- If you pass a string with a number and a valid unit, the number of equivalent ms is returned. + +## Caught a bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, node will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/reverse_engineering/node_modules/packet-reader/.travis.yml b/reverse_engineering/node_modules/packet-reader/.travis.yml new file mode 100644 index 0000000..ac2e9ee --- /dev/null +++ b/reverse_engineering/node_modules/packet-reader/.travis.yml @@ -0,0 +1,8 @@ +language: node_js + +node_js: "10" +matrix: + include: + - node_js: "4" + - node_js: "6" + - node_js: "8" diff --git a/reverse_engineering/node_modules/packet-reader/README.md b/reverse_engineering/node_modules/packet-reader/README.md new file mode 100644 index 0000000..5ae3ef8 --- /dev/null +++ b/reverse_engineering/node_modules/packet-reader/README.md @@ -0,0 +1,87 @@ +node-packet-reader +================== + +Handy little well tested module for reading length-prefixed binary packets. + +Since buffers come off a socket in randomly sized chunks you can't expect them to cleanly +break on packet boundaries. This module allows you to push buffers in and read +full packets out the other side, so you can get to parsing right away and not have +to manage concatenating partial buffers and searching through them for packets. 
+ +## install + +` $ npm install packet-reader ` + +## example + +```js +var Reader = require('packet-reader') + +var reader = new Reader() +//assuming you have a socket emitting `data` events +socket.on('data', function(buffer) { + reader.addChunk(buffer) + var packet = reader.read() + while(packet) { + //do something with fully parsed packet + } +}) +``` + + +here's a more full featured example: + +let's assume our "packet" for our protocol is 32-bit Big Endian length-prefixed strings +so a "hello world" packet would look something like [length, string] +`[0, 0, 0 0x0B, h, e, l, l, o, w, o, r, l, d]` + +```js +var Transform = require('stream').Transform +var Reader = require('packet-reader') +var reader = new Reader() +var parser = new Transform() +parser._transform = function(chunk, encoding, cb) { + reader.addChunk(chunk) + var packet = reader.read() + while(packet) { + this.push(packet.toString('utf8')) + packet = reader.read() + } + cb() +} + +var server = net.createServer(function(socket) { + socket.pipe(parser).pipe(stdout) +}) + +``` + +There are a few config options for setting optional pre-length padding byte. Read the tests for details. + +## License + +MIT + +Copyright 2015 Brian M. Carlson +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/reverse_engineering/node_modules/packet-reader/index.js b/reverse_engineering/node_modules/packet-reader/index.js new file mode 100644 index 0000000..5e97e21 --- /dev/null +++ b/reverse_engineering/node_modules/packet-reader/index.js @@ -0,0 +1,65 @@ +var assert = require('assert') + +var Reader = module.exports = function(options) { + //TODO - remove for version 1.0 + if(typeof options == 'number') { + options = { headerSize: options } + } + options = options || {} + this.offset = 0 + this.lastChunk = false + this.chunk = null + this.chunkLength = 0 + this.headerSize = options.headerSize || 0 + this.lengthPadding = options.lengthPadding || 0 + this.header = null + assert(this.headerSize < 2, 'pre-length header of more than 1 byte length not currently supported') +} + +Reader.prototype.addChunk = function(chunk) { + if (!this.chunk || this.offset === this.chunkLength) { + this.chunk = chunk + this.chunkLength = chunk.length + this.offset = 0 + return + } + + var newChunkLength = chunk.length + var newLength = this.chunkLength + newChunkLength + + if (newLength > this.chunk.length) { + var newBufferLength = this.chunk.length * 2 + while (newLength >= newBufferLength) { + newBufferLength *= 2 + } + var newBuffer = Buffer.alloc(newBufferLength) + this.chunk.copy(newBuffer) + this.chunk = newBuffer + } + chunk.copy(this.chunk, this.chunkLength) + this.chunkLength = newLength +} + +Reader.prototype.read = function() { + if(this.chunkLength < (this.headerSize + 4 + this.offset)) { + return false 
+ } + + if(this.headerSize) { + this.header = this.chunk[this.offset] + } + + //read length of next item + var length = this.chunk.readUInt32BE(this.offset + this.headerSize) + this.lengthPadding + + //next item spans more chunks than we have + var remaining = this.chunkLength - (this.offset + 4 + this.headerSize) + if(length > remaining) { + return false + } + + this.offset += (this.headerSize + 4) + var result = this.chunk.slice(this.offset, this.offset + length) + this.offset += length + return result +} diff --git a/reverse_engineering/node_modules/packet-reader/package.json b/reverse_engineering/node_modules/packet-reader/package.json new file mode 100644 index 0000000..7349ce6 --- /dev/null +++ b/reverse_engineering/node_modules/packet-reader/package.json @@ -0,0 +1,52 @@ +{ + "_from": "packet-reader@1.0.0", + "_id": "packet-reader@1.0.0", + "_inBundle": false, + "_integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==", + "_location": "/packet-reader", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "packet-reader@1.0.0", + "name": "packet-reader", + "escapedName": "packet-reader", + "rawSpec": "1.0.0", + "saveSpec": null, + "fetchSpec": "1.0.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "_shasum": "9238e5480dedabacfe1fe3f2771063f164157d74", + "_spec": "packet-reader@1.0.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", + "author": { + "name": "Brian M. 
Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-packet-reader/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Read binary packets...", + "devDependencies": { + "mocha": "~1.21.5" + }, + "directories": { + "test": "test" + }, + "homepage": "https://github.com/brianc/node-packet-reader", + "license": "MIT", + "main": "index.js", + "name": "packet-reader", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-packet-reader.git" + }, + "scripts": { + "test": "mocha" + }, + "version": "1.0.0" +} diff --git a/reverse_engineering/node_modules/packet-reader/test/index.js b/reverse_engineering/node_modules/packet-reader/test/index.js new file mode 100644 index 0000000..0e2eedb --- /dev/null +++ b/reverse_engineering/node_modules/packet-reader/test/index.js @@ -0,0 +1,148 @@ +var assert = require('assert') +var Reader = require('../') +describe('packet-reader', function() { + beforeEach(function() { + this.reader = new Reader(1) + }) + + it('reads perfect 1 length buffer', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1])) + var result = this.reader.read() + assert.equal(result.length, 1) + assert.equal(result[0], 1) + assert.strictEqual(false, this.reader.read()) + }) + + it('reads perfect longer buffer', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 4, 1, 2, 3, 4])) + var result = this.reader.read() + assert.equal(result.length, 4) + assert.strictEqual(false, this.reader.read()) + }) + + it('reads two parts', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1])) + var result = this.reader.read() + assert.strictEqual(false, result) + this.reader.addChunk(Buffer.from([2])) + var result = this.reader.read() + assert.equal(result.length, 1, 'should return 1 length buffer') + assert.equal(result[0], 2) + assert.strictEqual(this.reader.read(), false) + }) + + it('reads multi-part', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 16])) + 
assert.equal(false, this.reader.read()) + this.reader.addChunk(Buffer.from([1, 2, 3, 4, 5, 6, 7, 8])) + assert.equal(false, this.reader.read()) + this.reader.addChunk(Buffer.from([9, 10, 11, 12, 13, 14, 15, 16])) + var result = this.reader.read() + assert.equal(result.length, 16) + }) + + it('resets internal buffer at end of packet', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 16])) + this.reader.addChunk(Buffer.from([1, 2, 3, 4, 5, 6, 7, 8])) + this.reader.addChunk(Buffer.from([9, 10, 11, 12, 13, 14, 15, 16])) + var result = this.reader.read() + assert.equal(result.length, 16) + + var newChunk = Buffer.from([0, 0, 0, 0, 16]) + this.reader.addChunk(newChunk) + assert.equal(this.reader.offset, 0, 'should have been reset to 0.') + assert.strictEqual(this.reader.chunk, newChunk) + }) + + it('reads multiple messages from single chunk', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 2, 1, 2])) + var result = this.reader.read() + assert.equal(result.length, 1, 'should have 1 length buffer') + assert.equal(result[0], 1) + var result = this.reader.read() + assert.equal(result.length, 2, 'should have 2 length buffer but was ' + result.length) + assert.equal(result[0], 1) + assert.equal(result[1], 2) + assert.strictEqual(false, this.reader.read()) + }) + + it('reads 1 and a split', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1, 0, 0]))//, 0, 0, 2, 1, 2])) + var result = this.reader.read() + assert.equal(result.length, 1, 'should have 1 length buffer') + assert.equal(result[0], 1) + var result = this.reader.read() + assert.strictEqual(result, false) + + this.reader.addChunk(Buffer.from([0, 0, 2, 1, 2])) + var result = this.reader.read() + assert.equal(result.length, 2, 'should have 2 length buffer but was ' + result.length) + assert.equal(result[0], 1) + assert.equal(result[1], 2) + assert.strictEqual(false, this.reader.read()) + }) +}) + +describe('variable length header', function() { + 
beforeEach(function() { + this.reader = new Reader() + }) + + it('reads double message buffers', function() { + this.reader.addChunk(Buffer.from([ + 0, 0, 0, 1, 1, + 0, 0, 0, 2, 1, 2])) + var result = this.reader.read() + assert(result) + assert.equal(result.length, 1) + assert.equal(result[0], 1) + result = this.reader.read() + assert(result) + assert.equal(result.length, 2) + assert.equal(result[0], 1) + assert.equal(result[1], 2) + assert.strictEqual(this.reader.read(), false) + }) +}) + +describe('1 length code', function() { + beforeEach(function() { + this.reader = new Reader(1) + }) + + it('reads code', function() { + this.reader.addChunk(Buffer.from([9, 0, 0, 0, 1, 1])) + var result = this.reader.read() + assert(result) + assert.equal(this.reader.header, 9) + assert.equal(result.length, 1) + assert.equal(result[0], 1) + }) + + it('is set on uncompleted read', function() { + assert.equal(this.reader.header, null) + this.reader.addChunk(Buffer.from([2, 0, 0, 0, 1])) + assert.strictEqual(this.reader.read(), false) + assert.equal(this.reader.header, 2) + }) +}) + +describe('postgres style packet', function() { + beforeEach(function() { + this.reader = new Reader({ + headerSize: 1, + lengthPadding: -4 + }) + }) + + it('reads with padded length', function() { + this.reader.addChunk(Buffer.from([1, 0, 0, 0, 8, 0, 0, 2, 0])) + var result = this.reader.read() + assert(result) + assert.equal(result.length, 4) + assert.equal(result[0], 0) + assert.equal(result[1], 0) + assert.equal(result[2], 2) + assert.equal(result[3], 0) + }) +}) diff --git a/reverse_engineering/node_modules/pg-connection-string/LICENSE b/reverse_engineering/node_modules/pg-connection-string/LICENSE new file mode 100644 index 0000000..b068a6c --- /dev/null +++ b/reverse_engineering/node_modules/pg-connection-string/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Iced Development + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software 
and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-connection-string/README.md b/reverse_engineering/node_modules/pg-connection-string/README.md new file mode 100644 index 0000000..360505e --- /dev/null +++ b/reverse_engineering/node_modules/pg-connection-string/README.md @@ -0,0 +1,77 @@ +pg-connection-string +==================== + +[![NPM](https://nodei.co/npm/pg-connection-string.png?compact=true)](https://nodei.co/npm/pg-connection-string/) + +[![Build Status](https://travis-ci.org/iceddev/pg-connection-string.svg?branch=master)](https://travis-ci.org/iceddev/pg-connection-string) +[![Coverage Status](https://coveralls.io/repos/github/iceddev/pg-connection-string/badge.svg?branch=master)](https://coveralls.io/github/iceddev/pg-connection-string?branch=master) + +Functions for dealing with a PostgresSQL connection string + +`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git) +Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) +MIT License + +## Usage + +```js 
+var parse = require('pg-connection-string').parse; + +var config = parse('postgres://someuser:somepassword@somehost:381/somedatabase') +``` + +The resulting config contains a subset of the following properties: + +* `host` - Postgres server hostname or, for UNIX domain sockets, the socket filename +* `port` - port on which to connect +* `user` - User with which to authenticate to the server +* `password` - Corresponding password +* `database` - Database name within the server +* `client_encoding` - string encoding the client will use +* `ssl`, either a boolean or an object with properties + * `rejectUnauthorized` + * `cert` + * `key` + * `ca` +* any other query parameters (for example, `application_name`) are preserved intact. + +## Connection Strings + +The short summary of acceptable URLs is: + + * `socket:?` - UNIX domain socket + * `postgres://:@:/?` - TCP connection + +But see below for more details. + +### UNIX Domain Sockets + +When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`. +This form can be shortened to just a path: `/var/run/pgsql`. + +When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`. + +Query parameters follow a `?` character, including the following special query parameters: + + * `db=` - sets the database name (urlencoded) + * `encoding=` - sets the `client_encoding` property + +### TCP Connections + +TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted). +If username and password are included, they should be urlencoded. +The database name, however, should *not* be urlencoded. 
+ +Query parameters follow a `?` character, including the following special query parameters: + * `host=` - sets `host` property, overriding the URL's host + * `encoding=` - sets the `client_encoding` property + * `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly + * `sslmode=` + * `sslmode=disable` - sets `ssl` to false + * `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }` + * `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true + * `sslcert=` - reads data from the given file and includes the result as `ssl.cert` + * `sslkey=` - reads data from the given file and includes the result as `ssl.key` + * `sslrootcert=` - reads data from the given file and includes the result as `ssl.ca` + +A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty. diff --git a/reverse_engineering/node_modules/pg-connection-string/index.d.ts b/reverse_engineering/node_modules/pg-connection-string/index.d.ts new file mode 100644 index 0000000..3081270 --- /dev/null +++ b/reverse_engineering/node_modules/pg-connection-string/index.d.ts @@ -0,0 +1,15 @@ +export function parse(connectionString: string): ConnectionOptions + +export interface ConnectionOptions { + host: string | null + password?: string + user?: string + port?: string | null + database: string | null | undefined + client_encoding?: string + ssl?: boolean | string + + application_name?: string + fallback_application_name?: string + options?: string +} diff --git a/reverse_engineering/node_modules/pg-connection-string/index.js b/reverse_engineering/node_modules/pg-connection-string/index.js new file mode 100644 index 0000000..995ff06 --- /dev/null +++ b/reverse_engineering/node_modules/pg-connection-string/index.js @@ -0,0 +1,106 @@ +'use strict' + +var url = require('url') +var fs = require('fs') + +//Parse method copied from https://github.com/brianc/node-postgres +//Copyright 
(c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) +//MIT License + +//parses a connection string +function parse(str) { + //unix socket + if (str.charAt(0) === '/') { + var config = str.split(' ') + return { host: config[0], database: config[1] } + } + + // url parse expects spaces encoded as %20 + var result = url.parse( + / |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str) ? encodeURI(str).replace(/\%25(\d\d)/g, '%$1') : str, + true + ) + var config = result.query + for (var k in config) { + if (Array.isArray(config[k])) { + config[k] = config[k][config[k].length - 1] + } + } + + var auth = (result.auth || ':').split(':') + config.user = auth[0] + config.password = auth.splice(1).join(':') + + config.port = result.port + if (result.protocol == 'socket:') { + config.host = decodeURI(result.pathname) + config.database = result.query.db + config.client_encoding = result.query.encoding + return config + } + if (!config.host) { + // Only set the host if there is no equivalent query param. + config.host = result.hostname + } + + // If the host is missing it might be a URL-encoded path to a socket. + var pathname = result.pathname + if (!config.host && pathname && /^%2f/i.test(pathname)) { + var pathnameSplit = pathname.split('/') + config.host = decodeURIComponent(pathnameSplit[0]) + pathname = pathnameSplit.splice(1).join('/') + } + // result.pathname is not always guaranteed to have a '/' prefix (e.g. relative urls) + // only strip the slash if it is present. 
+ if (pathname && pathname.charAt(0) === '/') { + pathname = pathname.slice(1) || null + } + config.database = pathname && decodeURI(pathname) + + if (config.ssl === 'true' || config.ssl === '1') { + config.ssl = true + } + + if (config.ssl === '0') { + config.ssl = false + } + + if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) { + config.ssl = {} + } + + if (config.sslcert) { + config.ssl.cert = fs.readFileSync(config.sslcert).toString() + } + + if (config.sslkey) { + config.ssl.key = fs.readFileSync(config.sslkey).toString() + } + + if (config.sslrootcert) { + config.ssl.ca = fs.readFileSync(config.sslrootcert).toString() + } + + switch (config.sslmode) { + case 'disable': { + config.ssl = false + break + } + case 'prefer': + case 'require': + case 'verify-ca': + case 'verify-full': { + break + } + case 'no-verify': { + config.ssl.rejectUnauthorized = false + break + } + } + + return config +} + +module.exports = parse + +parse.parse = parse diff --git a/reverse_engineering/node_modules/pg-connection-string/package.json b/reverse_engineering/node_modules/pg-connection-string/package.json new file mode 100644 index 0000000..03b64fd --- /dev/null +++ b/reverse_engineering/node_modules/pg-connection-string/package.json @@ -0,0 +1,69 @@ +{ + "_from": "pg-connection-string@^2.5.0", + "_id": "pg-connection-string@2.5.0", + "_inBundle": false, + "_integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==", + "_location": "/pg-connection-string", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-connection-string@^2.5.0", + "name": "pg-connection-string", + "escapedName": "pg-connection-string", + "rawSpec": "^2.5.0", + "saveSpec": null, + "fetchSpec": "^2.5.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "_shasum": 
"538cadd0f7e603fc09a12590f3b8a452c2c0cf34", + "_spec": "pg-connection-string@^2.5.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", + "author": { + "name": "Blaine Bublitz", + "email": "blaine@iceddev.com", + "url": "http://iceddev.com/" + }, + "bugs": { + "url": "https://github.com/brianc/node-postgres/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Functions for dealing with a PostgresSQL connection string", + "devDependencies": { + "chai": "^4.1.1", + "coveralls": "^3.0.4", + "istanbul": "^0.4.5", + "mocha": "^7.1.2" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "gitHead": "d45947938263bec30a1e3252452f04177b785f66", + "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string", + "keywords": [ + "pg", + "connection", + "string", + "parse" + ], + "license": "MIT", + "main": "./index.js", + "name": "pg-connection-string", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-connection-string" + }, + "scripts": { + "check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100", + "coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls", + "test": "istanbul cover _mocha && npm run check-coverage" + }, + "types": "./index.d.ts", + "version": "2.5.0" +} diff --git a/reverse_engineering/node_modules/pg-int8/LICENSE b/reverse_engineering/node_modules/pg-int8/LICENSE new file mode 100644 index 0000000..c56c973 --- /dev/null +++ b/reverse_engineering/node_modules/pg-int8/LICENSE @@ -0,0 +1,13 @@ +Copyright © 2017, Charmander <~@charmander.me> + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/reverse_engineering/node_modules/pg-int8/README.md b/reverse_engineering/node_modules/pg-int8/README.md new file mode 100644 index 0000000..ef2e608 --- /dev/null +++ b/reverse_engineering/node_modules/pg-int8/README.md @@ -0,0 +1,16 @@ +[![Build status][ci image]][ci] + +64-bit big-endian signed integer-to-string conversion designed for [pg][]. + +```js +const readInt8 = require('pg-int8'); + +readInt8(Buffer.from([0, 1, 2, 3, 4, 5, 6, 7])) +// '283686952306183' +``` + + + [pg]: https://github.com/brianc/node-postgres + + [ci]: https://travis-ci.org/charmander/pg-int8 + [ci image]: https://api.travis-ci.org/charmander/pg-int8.svg diff --git a/reverse_engineering/node_modules/pg-int8/index.js b/reverse_engineering/node_modules/pg-int8/index.js new file mode 100644 index 0000000..db77975 --- /dev/null +++ b/reverse_engineering/node_modules/pg-int8/index.js @@ -0,0 +1,100 @@ +'use strict'; + +// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer +var BASE = 1000000; + +function readInt8(buffer) { + var high = buffer.readInt32BE(0); + var low = buffer.readUInt32BE(4); + var sign = ''; + + if (high < 0) { + high = ~high + (low === 0); + low = (~low + 1) >>> 0; + sign = '-'; + } + + var result = ''; + var carry; + var t; + var digits; + var pad; + var l; + var i; + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return 
sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + t = 0x100000000 * carry + low; + digits = '' + t % BASE; + + return sign + digits + result; + } +} + +module.exports = readInt8; diff --git a/reverse_engineering/node_modules/pg-int8/package.json b/reverse_engineering/node_modules/pg-int8/package.json new file mode 100644 index 0000000..2964a2b --- /dev/null +++ b/reverse_engineering/node_modules/pg-int8/package.json @@ -0,0 +1,52 @@ +{ + "_from": "pg-int8@1.0.1", + "_id": "pg-int8@1.0.1", + "_inBundle": false, + "_integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "_location": "/pg-int8", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "pg-int8@1.0.1", + "name": "pg-int8", + "escapedName": "pg-int8", + "rawSpec": "1.0.1", + "saveSpec": null, + "fetchSpec": "1.0.1" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "_shasum": "943bd463bf5b71b4170115f80f8efc9a0c0eb78c", + "_spec": "pg-int8@1.0.1", + "_where": 
"/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "bugs": { + "url": "https://github.com/charmander/pg-int8/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "64-bit big-endian signed integer-to-string conversion", + "devDependencies": { + "@charmander/eslint-config-base": "1.0.2", + "tap": "10.7.3" + }, + "engines": { + "node": ">=4.0.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/charmander/pg-int8#readme", + "license": "ISC", + "name": "pg-int8", + "repository": { + "type": "git", + "url": "git+https://github.com/charmander/pg-int8.git" + }, + "scripts": { + "test": "tap test" + }, + "version": "1.0.1" +} diff --git a/reverse_engineering/node_modules/pg-pool/LICENSE b/reverse_engineering/node_modules/pg-pool/LICENSE new file mode 100644 index 0000000..4e90581 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Brian M. Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/reverse_engineering/node_modules/pg-pool/README.md b/reverse_engineering/node_modules/pg-pool/README.md new file mode 100644 index 0000000..c6d7e92 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/README.md @@ -0,0 +1,376 @@ +# pg-pool +[![Build Status](https://travis-ci.org/brianc/node-pg-pool.svg?branch=master)](https://travis-ci.org/brianc/node-pg-pool) + +A connection pool for node-postgres + +## install +```sh +npm i pg-pool pg +``` + +## use + +### create + +to use pg-pool you must first create an instance of a pool + +```js +var Pool = require('pg-pool') + +// by default the pool uses the same +// configuration as whatever `pg` version you have installed +var pool = new Pool() + +// you can pass properties to the pool +// these properties are passed unchanged to both the node-postgres Client constructor +// and the node-pool (https://github.com/coopernurse/node-pool) constructor +// allowing you to fully configure the behavior of both +var pool2 = new Pool({ + database: 'postgres', + user: 'brianc', + password: 'secret!', + port: 5432, + ssl: true, + max: 20, // set pool max size to 20 + idleTimeoutMillis: 1000, // close idle clients after 1 second + connectionTimeoutMillis: 1000, // return an error after 1 second if connection could not be established + maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion) +}) + +//you can supply a custom client constructor +//if you want to use the native postgres client +var NativeClient = require('pg').native.Client +var nativePool = new Pool({ Client: NativeClient }) + +//you can even pool pg-native clients directly +var PgNativeClient = require('pg-native') +var pgNativePool = 
new Pool({ Client: PgNativeClient }) +``` + +##### Note: +The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL. + +```js +const Pool = require('pg-pool'); +const url = require('url') + +const params = url.parse(process.env.DATABASE_URL); +const auth = params.auth.split(':'); + +const config = { + user: auth[0], + password: auth[1], + host: params.hostname, + port: params.port, + database: params.pathname.split('/')[1], + ssl: true +}; + +const pool = new Pool(config); + +/* + Transforms, 'postgres://DBuser:secret@DBHost:#####/myDB', into + config = { + user: 'DBuser', + password: 'secret', + host: 'DBHost', + port: '#####', + database: 'myDB', + ssl: true + } +*/ +``` + +### acquire clients with a promise + +pg-pool supports a fully promise-based api for acquiring clients + +```js +var pool = new Pool() +pool.connect().then(client => { + client.query('select $1::text as name', ['pg-pool']).then(res => { + client.release() + console.log('hello from', res.rows[0].name) + }) + .catch(e => { + client.release() + console.error('query error', e.message, e.stack) + }) +}) +``` + +### plays nice with async/await + +this ends up looking much nicer if you're using [co](https://github.com/tj/co) or async/await: + +```js +// with async/await +(async () => { + var pool = new Pool() + var client = await pool.connect() + try { + var result = await client.query('select $1::text as name', ['brianc']) + console.log('hello from', result.rows[0]) + } finally { + client.release() + } +})().catch(e => console.error(e.message, e.stack)) + +// with co +co(function * () { + var client = yield pool.connect() + try { + var result = yield client.query('select $1::text as name', ['brianc']) + console.log('hello from', result.rows[0]) + } finally { + client.release() + } +}).catch(e => console.error(e.message, e.stack)) +``` + +### 
your new favorite helper method + +because its so common to just run a query and return the client to the pool afterward pg-pool has this built-in: + +```js +var pool = new Pool() +var time = await pool.query('SELECT NOW()') +var name = await pool.query('select $1::text as name', ['brianc']) +console.log(name.rows[0].name, 'says hello at', time.rows[0].name) +``` + +you can also use a callback here if you'd like: + +```js +var pool = new Pool() +pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + console.log(res.rows[0].name) // brianc +}) +``` + +__pro tip:__ unless you need to run a transaction (which requires a single client for multiple queries) or you +have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor) +you should almost always just use `pool.query`. Its easy, it does the right thing :tm:, and wont ever forget to return +clients back to the pool after the query is done. + +### drop-in backwards compatible + +pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years: + +```js +var pool = new Pool() +pool.connect((err, client, done) => { + if (err) return done(err) + + client.query('SELECT $1::text as name', ['pg-pool'], (err, res) => { + done() + if (err) { + return console.error('query error', e.message, e.stack) + } + console.log('hello from', res.rows[0].name) + }) +}) +``` + +### shut it down + +When you are finished with the pool if all the clients are idle the pool will close them after `config.idleTimeoutMillis` and your app +will shutdown gracefully. 
If you don't want to wait for the timeout you can end the pool as follows: + +```js +var pool = new Pool() +var client = await pool.connect() +console.log(await client.query('select now()')) +client.release() +await pool.end() +``` + +### a note on instances + +The pool should be a __long-lived object__ in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example: + +```js +// assume this is a file in your program at ./your-app/lib/db.js + +// correct usage: create the pool and let it live +// 'globally' here, controlling access to it through exported methods +var pool = new pg.Pool() + +// this is the right way to export the query method +module.exports.query = (text, values) => { + console.log('query:', text, values) + return pool.query(text, values) +} + +// this would be the WRONG way to export the connect method +module.exports.connect = () => { + // notice how we would be creating a pool instance here + // every time we called 'connect' to get a new client? + // that's a bad thing & results in creating an unbounded + // number of pools & therefore connections + var aPool = new pg.Pool() + return aPool.connect() +} +``` + +### events + +Every instance of a `Pool` is an event emitter. These instances emit the following events: + +#### error + +Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients. 
+ +Example: + +```js +const Pool = require('pg-pool') +const pool = new Pool() + +// attach an error handler to the pool for when a connected, idle client +// receives an error by being disconnected, etc +pool.on('error', function(error, client) { + // handle this in the same way you would treat process.on('uncaughtException') + // it is supplied the error as well as the idle client which received the error +}) +``` + +#### connect + +Fired whenever the pool creates a __new__ `pg.Client` instance and successfully connects it to the backend. + +Example: + +```js +const Pool = require('pg-pool') +const pool = new Pool() + +var count = 0 + +pool.on('connect', client => { + client.count = count++ +}) + +pool + .connect() + .then(client => { + return client + .query('SELECT $1::int AS "clientCount"', [client.count]) + .then(res => console.log(res.rows[0].clientCount)) // outputs 0 + .then(() => client) + }) + .then(client => client.release()) + +``` + +#### acquire + +Fired whenever the a client is acquired from the pool + +Example: + +This allows you to count the number of clients which have ever been acquired from the pool. + +```js +var Pool = require('pg-pool') +var pool = new Pool() + +var acquireCount = 0 +pool.on('acquire', function (client) { + acquireCount++ +}) + +var connectCount = 0 +pool.on('connect', function () { + connectCount++ +}) + +for (var i = 0; i < 200; i++) { + pool.query('SELECT NOW()') +} + +setTimeout(function () { + console.log('connect count:', connectCount) // output: connect count: 10 + console.log('acquire count:', acquireCount) // output: acquire count: 200 +}, 100) + +``` + +### environment variables + +pg-pool & node-postgres support some of the same environment variables as `psql` supports. 
The most common are:
+
+```
+PGDATABASE=my_db
+PGUSER=username
+PGPASSWORD="my awesome password"
+PGPORT=5432
+PGSSLMODE=require
+```
+
+Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box.
+
+## bring your own promise
+
+In versions of node `<=0.12.x` there is no native promise implementation available globally. You can polyfill the promise globally like this:
+
+```js
+// first run `npm install promise-polyfill --save`
+if (typeof Promise == 'undefined') {
+  global.Promise = require('promise-polyfill')
+}
+```
+
+You can use any other promise implementation you'd like. The pool also allows you to configure the promise implementation on a per-pool level:
+
+```js
+var bluebirdPool = new Pool({
+  Promise: require('bluebird')
+})
+```
+
+__please note:__ in node `<=0.12.x` the pool will throw if you do not provide a promise constructor in one of the two ways mentioned above. In node `>=4.0.0` the pool will use the native promise implementation by default; however, the two methods above still allow you to "bring your own."
+
+## maxUses and read-replica autoscaling (e.g. AWS Aurora)
+
+The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections.
+
+The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing. 
+ +Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections. + +If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas. + +This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance. + +You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value: + +``` +maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize +``` + +In the example above, assuming we acquire and release 1 connection per request and we are aiming for a 30 minute rebalancing window: + +``` +maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize + 7200 = 1800 * 1000 / 10 / 25 +``` + +## tests + +To run tests clone the repo, `npm i` in the working dir, and then run `npm test` + +## contributions + +I love contributions. Please make sure they have tests, and submit a PR. 
If you're not sure if the issue is worth it or will be accepted it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres related stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there.
+
+## license
+
+The MIT License (MIT)
+Copyright (c) 2016 Brian M. Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/reverse_engineering/node_modules/pg-pool/index.js b/reverse_engineering/node_modules/pg-pool/index.js
new file mode 100644
index 0000000..48bf5c7
--- /dev/null
+++ b/reverse_engineering/node_modules/pg-pool/index.js
@@ -0,0 +1,421 @@
+'use strict'
+const EventEmitter = require('events').EventEmitter
+
+const NOOP = function () {}
+
+const removeWhere = (list, predicate) => {
+  const i = list.findIndex(predicate)
+
+  return i === -1 ? 
undefined : list.splice(i, 1)[0] +} + +class IdleItem { + constructor(client, idleListener, timeoutId) { + this.client = client + this.idleListener = idleListener + this.timeoutId = timeoutId + } +} + +class PendingItem { + constructor(callback) { + this.callback = callback + } +} + +function throwOnDoubleRelease() { + throw new Error('Release called on client which has already been released to the pool.') +} + +function promisify(Promise, callback) { + if (callback) { + return { callback: callback, result: undefined } + } + let rej + let res + const cb = function (err, client) { + err ? rej(err) : res(client) + } + const result = new Promise(function (resolve, reject) { + res = resolve + rej = reject + }) + return { callback: cb, result: result } +} + +function makeIdleListener(pool, client) { + return function idleListener(err) { + err.client = client + + client.removeListener('error', idleListener) + client.on('error', () => { + pool.log('additional client error after disconnection due to error', err) + }) + pool._remove(client) + // TODO - document that once the pool emits an error + // the client has already been closed & purged and is unusable + pool.emit('error', err, client) + } +} + +class Pool extends EventEmitter { + constructor(options, Client) { + super() + this.options = Object.assign({}, options) + + if (options != null && 'password' in options) { + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this.options, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: options.password, + }) + } + if (options != null && options.ssl && options.ssl.key) { + // "hiding" the ssl->key so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this.options.ssl, 'key', { + enumerable: false, + }) + } + + this.options.max = this.options.max || this.options.poolSize || 10 + this.options.maxUses = 
this.options.maxUses || Infinity + this.options.allowExitOnIdle = this.options.allowExitOnIdle || false + this.log = this.options.log || function () {} + this.Client = this.options.Client || Client || require('pg').Client + this.Promise = this.options.Promise || global.Promise + + if (typeof this.options.idleTimeoutMillis === 'undefined') { + this.options.idleTimeoutMillis = 10000 + } + + this._clients = [] + this._idle = [] + this._pendingQueue = [] + this._endCallback = undefined + this.ending = false + this.ended = false + } + + _isFull() { + return this._clients.length >= this.options.max + } + + _pulseQueue() { + this.log('pulse queue') + if (this.ended) { + this.log('pulse queue ended') + return + } + if (this.ending) { + this.log('pulse queue on ending') + if (this._idle.length) { + this._idle.slice().map((item) => { + this._remove(item.client) + }) + } + if (!this._clients.length) { + this.ended = true + this._endCallback() + } + return + } + // if we don't have any waiting, do nothing + if (!this._pendingQueue.length) { + this.log('no queued requests') + return + } + // if we don't have any idle clients and we have no more room do nothing + if (!this._idle.length && this._isFull()) { + return + } + const pendingItem = this._pendingQueue.shift() + if (this._idle.length) { + const idleItem = this._idle.pop() + clearTimeout(idleItem.timeoutId) + const client = idleItem.client + client.ref && client.ref() + const idleListener = idleItem.idleListener + + return this._acquireClient(client, pendingItem, idleListener, false) + } + if (!this._isFull()) { + return this.newClient(pendingItem) + } + throw new Error('unexpected condition') + } + + _remove(client) { + const removed = removeWhere(this._idle, (item) => item.client === client) + + if (removed !== undefined) { + clearTimeout(removed.timeoutId) + } + + this._clients = this._clients.filter((c) => c !== client) + client.end() + this.emit('remove', client) + } + + connect(cb) { + if (this.ending) { + const err 
= new Error('Cannot use a pool after calling end on the pool') + return cb ? cb(err) : this.Promise.reject(err) + } + + const response = promisify(this.Promise, cb) + const result = response.result + + // if we don't have to connect a new client, don't do so + if (this._isFull() || this._idle.length) { + // if we have idle clients schedule a pulse immediately + if (this._idle.length) { + process.nextTick(() => this._pulseQueue()) + } + + if (!this.options.connectionTimeoutMillis) { + this._pendingQueue.push(new PendingItem(response.callback)) + return result + } + + const queueCallback = (err, res, done) => { + clearTimeout(tid) + response.callback(err, res, done) + } + + const pendingItem = new PendingItem(queueCallback) + + // set connection timeout on checking out an existing client + const tid = setTimeout(() => { + // remove the callback from pending waiters because + // we're going to call it with a timeout error + removeWhere(this._pendingQueue, (i) => i.callback === queueCallback) + pendingItem.timedOut = true + response.callback(new Error('timeout exceeded when trying to connect')) + }, this.options.connectionTimeoutMillis) + + this._pendingQueue.push(pendingItem) + return result + } + + this.newClient(new PendingItem(response.callback)) + + return result + } + + newClient(pendingItem) { + const client = new this.Client(this.options) + this._clients.push(client) + const idleListener = makeIdleListener(this, client) + + this.log('checking client timeout') + + // connection timeout logic + let tid + let timeoutHit = false + if (this.options.connectionTimeoutMillis) { + tid = setTimeout(() => { + this.log('ending client due to timeout') + timeoutHit = true + // force kill the node driver, and let libpq do its teardown + client.connection ? 
client.connection.stream.destroy() : client.end() + }, this.options.connectionTimeoutMillis) + } + + this.log('connecting new client') + client.connect((err) => { + if (tid) { + clearTimeout(tid) + } + client.on('error', idleListener) + if (err) { + this.log('client failed to connect', err) + // remove the dead client from our list of clients + this._clients = this._clients.filter((c) => c !== client) + if (timeoutHit) { + err.message = 'Connection terminated due to connection timeout' + } + + // this client won’t be released, so move on immediately + this._pulseQueue() + + if (!pendingItem.timedOut) { + pendingItem.callback(err, undefined, NOOP) + } + } else { + this.log('new client connected') + + return this._acquireClient(client, pendingItem, idleListener, true) + } + }) + } + + // acquire a client for a pending work item + _acquireClient(client, pendingItem, idleListener, isNew) { + if (isNew) { + this.emit('connect', client) + } + + this.emit('acquire', client) + + client.release = this._releaseOnce(client, idleListener) + + client.removeListener('error', idleListener) + + if (!pendingItem.timedOut) { + if (isNew && this.options.verify) { + this.options.verify(client, (err) => { + if (err) { + client.release(err) + return pendingItem.callback(err, undefined, NOOP) + } + + pendingItem.callback(undefined, client, client.release) + }) + } else { + pendingItem.callback(undefined, client, client.release) + } + } else { + if (isNew && this.options.verify) { + this.options.verify(client, client.release) + } else { + client.release() + } + } + } + + // returns a function that wraps _release and throws if called more than once + _releaseOnce(client, idleListener) { + let released = false + + return (err) => { + if (released) { + throwOnDoubleRelease() + } + + released = true + this._release(client, idleListener, err) + } + } + + // release a client back to the poll, include an error + // to remove it from the pool + _release(client, idleListener, err) { + 
client.on('error', idleListener) + + client._poolUseCount = (client._poolUseCount || 0) + 1 + + // TODO(bmc): expose a proper, public interface _queryable and _ending + if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) { + if (client._poolUseCount >= this.options.maxUses) { + this.log('remove expended client') + } + this._remove(client) + this._pulseQueue() + return + } + + // idle timeout + let tid + if (this.options.idleTimeoutMillis) { + tid = setTimeout(() => { + this.log('remove idle client') + this._remove(client) + }, this.options.idleTimeoutMillis) + + if (this.options.allowExitOnIdle) { + // allow Node to exit if this is all that's left + tid.unref() + } + } + + if (this.options.allowExitOnIdle) { + client.unref() + } + + this._idle.push(new IdleItem(client, idleListener, tid)) + this._pulseQueue() + } + + query(text, values, cb) { + // guard clause against passing a function as the first parameter + if (typeof text === 'function') { + const response = promisify(this.Promise, text) + setImmediate(function () { + return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported')) + }) + return response.result + } + + // allow plain text query without values + if (typeof values === 'function') { + cb = values + values = undefined + } + const response = promisify(this.Promise, cb) + cb = response.callback + + this.connect((err, client) => { + if (err) { + return cb(err) + } + + let clientReleased = false + const onError = (err) => { + if (clientReleased) { + return + } + clientReleased = true + client.release(err) + cb(err) + } + + client.once('error', onError) + this.log('dispatching query') + client.query(text, values, (err, res) => { + this.log('query dispatched') + client.removeListener('error', onError) + if (clientReleased) { + return + } + clientReleased = true + client.release(err) + if (err) { + return cb(err) + } else { + return cb(undefined, 
res) + } + }) + }) + return response.result + } + + end(cb) { + this.log('ending') + if (this.ending) { + const err = new Error('Called end on pool more than once') + return cb ? cb(err) : this.Promise.reject(err) + } + this.ending = true + const promised = promisify(this.Promise, cb) + this._endCallback = promised.callback + this._pulseQueue() + return promised.result + } + + get waitingCount() { + return this._pendingQueue.length + } + + get idleCount() { + return this._idle.length + } + + get totalCount() { + return this._clients.length + } +} +module.exports = Pool diff --git a/reverse_engineering/node_modules/pg-pool/package.json b/reverse_engineering/node_modules/pg-pool/package.json new file mode 100644 index 0000000..b8b9bb9 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/package.json @@ -0,0 +1,68 @@ +{ + "_from": "pg-pool@^3.4.1", + "_id": "pg-pool@3.4.1", + "_inBundle": false, + "_integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==", + "_location": "/pg-pool", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-pool@^3.4.1", + "name": "pg-pool", + "escapedName": "pg-pool", + "rawSpec": "^3.4.1", + "saveSpec": null, + "fetchSpec": "^3.4.1" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz", + "_shasum": "0e71ce2c67b442a5e862a9c182172c37eda71e9c", + "_spec": "pg-pool@^3.4.1", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", + "author": { + "name": "Brian M. 
Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-pg-pool/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Connection pool for node-postgres", + "devDependencies": { + "bluebird": "3.4.1", + "co": "4.6.0", + "expect.js": "0.3.1", + "lodash": "^4.17.11", + "mocha": "^7.1.2", + "pg-cursor": "^1.3.0" + }, + "directories": { + "test": "test" + }, + "gitHead": "92b4d37926c276d343bfe56447ff6f526af757cf", + "homepage": "https://github.com/brianc/node-pg-pool#readme", + "keywords": [ + "pg", + "postgres", + "pool", + "database" + ], + "license": "MIT", + "main": "index.js", + "name": "pg-pool", + "peerDependencies": { + "pg": ">=8.0" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-pool" + }, + "scripts": { + "test": " node_modules/.bin/mocha" + }, + "version": "3.4.1" +} diff --git a/reverse_engineering/node_modules/pg-pool/test/bring-your-own-promise.js b/reverse_engineering/node_modules/pg-pool/test/bring-your-own-promise.js new file mode 100644 index 0000000..e905ccc --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/bring-your-own-promise.js @@ -0,0 +1,42 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const BluebirdPromise = require('bluebird') + +const Pool = require('../') + +const checkType = (promise) => { + expect(promise).to.be.a(BluebirdPromise) + return promise.catch((e) => undefined) +} + +describe('Bring your own promise', function () { + it( + 'uses supplied promise for operations', + co.wrap(function* () { + const pool = new Pool({ Promise: BluebirdPromise }) + const client1 = yield checkType(pool.connect()) + client1.release() + yield checkType(pool.query('SELECT NOW()')) + const client2 = yield checkType(pool.connect()) + // TODO - make sure pg supports BYOP as well + client2.release() + yield 
checkType(pool.end()) + }) + ) + + it( + 'uses promises in errors', + co.wrap(function* () { + const pool = new Pool({ Promise: BluebirdPromise, port: 48484 }) + yield checkType(pool.connect()) + yield checkType(pool.end()) + yield checkType(pool.connect()) + yield checkType(pool.query()) + yield checkType(pool.end()) + }) + ) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/connection-strings.js b/reverse_engineering/node_modules/pg-pool/test/connection-strings.js new file mode 100644 index 0000000..de45830 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/connection-strings.js @@ -0,0 +1,29 @@ +const expect = require('expect.js') +const describe = require('mocha').describe +const it = require('mocha').it +const Pool = require('../') + +describe('Connection strings', function () { + it('pool delegates connectionString property to client', function (done) { + const connectionString = 'postgres://foo:bar@baz:1234/xur' + + const pool = new Pool({ + // use a fake client so we can check we're passed the connectionString + Client: function (args) { + expect(args.connectionString).to.equal(connectionString) + return { + connect: function (cb) { + cb(new Error('testing')) + }, + on: function () {}, + } + }, + connectionString: connectionString, + }) + + pool.connect(function (err, client) { + expect(err).to.not.be(undefined) + done() + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/connection-timeout.js b/reverse_engineering/node_modules/pg-pool/test/connection-timeout.js new file mode 100644 index 0000000..05e8931 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/connection-timeout.js @@ -0,0 +1,229 @@ +'use strict' +const net = require('net') +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const before = require('mocha').before +const after = require('mocha').after + +const Pool = require('../') + 
+describe('connection timeout', () => { + const connectionFailure = new Error('Temporary connection failure') + + before((done) => { + this.server = net.createServer((socket) => { + socket.on('data', () => { + // discard any buffered data or the server wont terminate + }) + }) + + this.server.listen(() => { + this.port = this.server.address().port + done() + }) + }) + + after((done) => { + this.server.close(done) + }) + + it('should callback with an error if timeout is passed', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' }) + pool.connect((err, client, release) => { + expect(err).to.be.an(Error) + expect(err.message).to.contain('timeout') + expect(client).to.equal(undefined) + expect(pool.idleCount).to.equal(0) + done() + }) + }) + + it('should reject promise with an error if timeout is passed', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' }) + pool.connect().catch((err) => { + expect(err).to.be.an(Error) + expect(err.message).to.contain('timeout') + expect(pool.idleCount).to.equal(0) + done() + }) + }) + + it( + 'should handle multiple timeouts', + co.wrap( + function* () { + const errors = [] + const pool = new Pool({ connectionTimeoutMillis: 1, port: this.port, host: 'localhost' }) + for (var i = 0; i < 15; i++) { + try { + yield pool.connect() + } catch (e) { + errors.push(e) + } + } + expect(errors).to.have.length(15) + }.bind(this) + ) + ) + + it('should timeout on checkout of used connection', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + release() + pool.end(done) + }) + }) + }) + + it('should not break further pending checkouts on a timeout', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 
200, max: 1 }) + pool.connect((err, client, releaseOuter) => { + expect(err).to.be(undefined) + + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + releaseOuter() + }) + + setTimeout(() => { + pool.connect((err, client, releaseInner) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + releaseInner() + pool.end(done) + }) + }, 100) + }) + }) + + it('should timeout on query if all clients are busy', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.query('select now()', (err, result) => { + expect(err).to.be.an(Error) + expect(result).to.be(undefined) + release() + pool.end(done) + }) + }) + }) + + it('should recover from timeout errors', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.query('select now()', (err, result) => { + expect(err).to.be.an(Error) + expect(result).to.be(undefined) + release() + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + }) + + it('continues processing after a connection failure', (done) => { + const Client = require('pg').Client + const orgConnect = Client.prototype.connect + let called = false + + Client.prototype.connect = function (cb) { + // Simulate a failure on first call + if (!called) { + called = true + + return setTimeout(() => { + cb(connectionFailure) + }, 100) + } + // And pass-through the second call + orgConnect.call(this, cb) + } + + const pool = new Pool({ + Client: Client, + connectionTimeoutMillis: 1000, + max: 1, + }) + + pool.connect((err, client, release) => { + expect(err).to.be(connectionFailure) + + pool.query('select $1::text as 
name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + + it('releases newly connected clients if the queued already timed out', (done) => { + const Client = require('pg').Client + + const orgConnect = Client.prototype.connect + + let connection = 0 + + Client.prototype.connect = function (cb) { + // Simulate a failure on first call + if (connection === 0) { + connection++ + + return setTimeout(() => { + cb(connectionFailure) + }, 300) + } + + // And second connect taking > connection timeout + if (connection === 1) { + connection++ + + return setTimeout(() => { + orgConnect.call(this, cb) + }, 1000) + } + + orgConnect.call(this, cb) + } + + const pool = new Pool({ + Client: Client, + connectionTimeoutMillis: 1000, + max: 1, + }) + + // Direct connect + pool.connect((err, client, release) => { + expect(err).to.be(connectionFailure) + }) + + // Queued + let called = 0 + pool.connect((err, client, release) => { + // Verify the callback is only called once + expect(called++).to.be(0) + expect(err).to.be.an(Error) + + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/ending.js b/reverse_engineering/node_modules/pg-pool/test/ending.js new file mode 100644 index 0000000..e1839b4 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/ending.js @@ -0,0 +1,40 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool ending', () => { + it('ends without being used', (done) => { + const pool = new Pool() + pool.end(done) + }) + + it('ends with a promise', () => { + return new Pool().end() + }) + + it( + 'ends with clients', + co.wrap(function* () { + const pool = 
new Pool() + const res = yield pool.query('SELECT $1::text as name', ['brianc']) + expect(res.rows[0].name).to.equal('brianc') + return pool.end() + }) + ) + + it( + 'allows client to finish', + co.wrap(function* () { + const pool = new Pool() + const query = pool.query('SELECT $1::text as name', ['brianc']) + yield pool.end() + const res = yield query + expect(res.rows[0].name).to.equal('brianc') + }) + ) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/error-handling.js b/reverse_engineering/node_modules/pg-pool/test/error-handling.js new file mode 100644 index 0000000..0a996b8 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/error-handling.js @@ -0,0 +1,248 @@ +'use strict' +const net = require('net') +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool error handling', function () { + it('Should complete these queries without dying', function (done) { + const pool = new Pool() + let errors = 0 + let shouldGet = 0 + function runErrorQuery() { + shouldGet++ + return new Promise(function (resolve, reject) { + pool + .query("SELECT 'asd'+1 ") + .then(function (res) { + reject(res) // this should always error + }) + .catch(function (err) { + errors++ + resolve(err) + }) + }) + } + const ps = [] + for (let i = 0; i < 5; i++) { + ps.push(runErrorQuery()) + } + Promise.all(ps).then(function () { + expect(shouldGet).to.eql(errors) + pool.end(done) + }) + }) + + describe('calling release more than once', () => { + it( + 'should throw each time', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + client.release() + expect(() => client.release()).to.throwError() + expect(() => client.release()).to.throwError() + return yield pool.end() + }) + ) + + it('should throw each time with callbacks', function (done) { + const pool = new Pool() + + pool.connect(function (err, 
client, clientDone) { + expect(err).not.to.be.an(Error) + clientDone() + + expect(() => clientDone()).to.throwError() + expect(() => clientDone()).to.throwError() + + pool.end(done) + }) + }) + }) + + describe('using an ended pool', () => { + it('rejects all additional promises', (done) => { + const pool = new Pool() + const promises = [] + pool.end().then(() => { + const squash = (promise) => promise.catch((e) => 'okay!') + promises.push(squash(pool.connect())) + promises.push(squash(pool.query('SELECT NOW()'))) + promises.push(squash(pool.end())) + Promise.all(promises).then((res) => { + expect(res).to.eql(['okay!', 'okay!', 'okay!']) + done() + }) + }) + }) + + it('returns an error on all additional callbacks', (done) => { + const pool = new Pool() + pool.end(() => { + pool.query('SELECT *', (err) => { + expect(err).to.be.an(Error) + pool.connect((err) => { + expect(err).to.be.an(Error) + pool.end((err) => { + expect(err).to.be.an(Error) + done() + }) + }) + }) + }) + }) + }) + + describe('error from idle client', () => { + it( + 'removes client from pool', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.waitingCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + client.release() + yield new Promise((resolve, reject) => { + process.nextTick(() => { + let poolError + pool.once('error', (err) => { + poolError = err + }) + + let clientError + client.once('error', (err) => { + clientError = err + }) + + client.emit('error', new Error('expected')) + + expect(clientError.message).to.equal('expected') + expect(poolError.message).to.equal('expected') + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + pool.end().then(resolve, reject) + }) + }) + }) + ) + }) + + describe('error from in-use client', () => { + it( + 'keeps the client in the pool', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + 
expect(pool.totalCount).to.equal(1) + expect(pool.waitingCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + + yield new Promise((resolve, reject) => { + process.nextTick(() => { + let poolError + pool.once('error', (err) => { + poolError = err + }) + + let clientError + client.once('error', (err) => { + clientError = err + }) + + client.emit('error', new Error('expected')) + + expect(clientError.message).to.equal('expected') + expect(poolError).not.to.be.ok() + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + client.release() + pool.end().then(resolve, reject) + }) + }) + }) + ) + }) + + describe('passing a function to pool.query', () => { + it('calls back with error', (done) => { + const pool = new Pool() + console.log('passing fn to query') + pool.query((err) => { + expect(err).to.be.an(Error) + pool.end(done) + }) + }) + }) + + describe('pool with lots of errors', () => { + it( + 'continues to work and provide new clients', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + const errors = [] + for (var i = 0; i < 20; i++) { + try { + yield pool.query('invalid sql') + } catch (err) { + errors.push(err) + } + } + expect(errors).to.have.length(20) + expect(pool.idleCount).to.equal(0) + expect(pool.query).to.be.a(Function) + const res = yield pool.query('SELECT $1::text as name', ['brianc']) + expect(res.rows).to.have.length(1) + expect(res.rows[0].name).to.equal('brianc') + return pool.end() + }) + ) + }) + + it('should continue with queued items after a connection failure', (done) => { + const closeServer = net + .createServer((socket) => { + socket.destroy() + }) + .unref() + + closeServer.listen(() => { + const pool = new Pool({ max: 1, port: closeServer.address().port, host: 'localhost' }) + pool.connect((err) => { + expect(err).to.be.an(Error) + if (err.code) { + expect(err.code).to.be('ECONNRESET') + } + }) + pool.connect((err) => { + expect(err).to.be.an(Error) + if (err.code) { + 
expect(err.code).to.be('ECONNRESET') + } + closeServer.close(() => { + pool.end(done) + }) + }) + }) + }) + + it('handles post-checkout client failures in pool.query', (done) => { + const pool = new Pool({ max: 1 }) + pool.on('error', () => { + // We double close the connection in this test, prevent exception caused by that + }) + pool.query('SELECT pg_sleep(5)', [], (err) => { + expect(err).to.be.an(Error) + done() + }) + + setTimeout(() => { + pool._clients[0].end() + }, 1000) + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/events.js b/reverse_engineering/node_modules/pg-pool/test/events.js new file mode 100644 index 0000000..6197924 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/events.js @@ -0,0 +1,86 @@ +'use strict' + +const expect = require('expect.js') +const EventEmitter = require('events').EventEmitter +const describe = require('mocha').describe +const it = require('mocha').it +const Pool = require('../') + +describe('events', function () { + it('emits connect before callback', function (done) { + const pool = new Pool() + let emittedClient = false + pool.on('connect', function (client) { + emittedClient = client + }) + + pool.connect(function (err, client, release) { + if (err) return done(err) + release() + pool.end() + expect(client).to.be(emittedClient) + done() + }) + }) + + it('emits "connect" only with a successful connection', function () { + const pool = new Pool({ + // This client will always fail to connect + Client: mockClient({ + connect: function (cb) { + process.nextTick(() => { + cb(new Error('bad news')) + }) + }, + }), + }) + pool.on('connect', function () { + throw new Error('should never get here') + }) + return pool.connect().catch((e) => expect(e.message).to.equal('bad news')) + }) + + it('emits acquire every time a client is acquired', function (done) { + const pool = new Pool() + let acquireCount = 0 + pool.on('acquire', function (client) { + expect(client).to.be.ok() + acquireCount++ + }) + 
for (let i = 0; i < 10; i++) { + pool.connect(function (err, client, release) { + if (err) return done(err) + release() + }) + pool.query('SELECT now()') + } + setTimeout(function () { + expect(acquireCount).to.be(20) + pool.end(done) + }, 100) + }) + + it('emits error and client if an idle client in the pool hits an error', function (done) { + const pool = new Pool() + pool.connect(function (err, client) { + expect(err).to.equal(undefined) + client.release() + setImmediate(function () { + client.emit('error', new Error('problem')) + }) + pool.once('error', function (err, errClient) { + expect(err.message).to.equal('problem') + expect(errClient).to.equal(client) + done() + }) + }) + }) +}) + +function mockClient(methods) { + return function () { + const client = new EventEmitter() + Object.assign(client, methods) + return client + } +} diff --git a/reverse_engineering/node_modules/pg-pool/test/idle-timeout-exit.js b/reverse_engineering/node_modules/pg-pool/test/idle-timeout-exit.js new file mode 100644 index 0000000..1292634 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/idle-timeout-exit.js @@ -0,0 +1,16 @@ +// This test is meant to be spawned from idle-timeout.js +if (module === require.main) { + const allowExitOnIdle = process.env.ALLOW_EXIT_ON_IDLE === '1' + const Pool = require('../index') + + const pool = new Pool({ idleTimeoutMillis: 200, ...(allowExitOnIdle ? 
{ allowExitOnIdle: true } : {}) }) + pool.query('SELECT NOW()', (err, res) => console.log('completed first')) + pool.on('remove', () => { + console.log('removed') + done() + }) + + setTimeout(() => { + pool.query('SELECT * from generate_series(0, 1000)', (err, res) => console.log('completed second')) + }, 50) +} diff --git a/reverse_engineering/node_modules/pg-pool/test/idle-timeout.js b/reverse_engineering/node_modules/pg-pool/test/idle-timeout.js new file mode 100644 index 0000000..0bb0975 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/idle-timeout.js @@ -0,0 +1,118 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const { fork } = require('child_process') +const path = require('path') + +const Pool = require('../') + +const wait = (time) => new Promise((resolve) => setTimeout(resolve, time)) + +describe('idle timeout', () => { + it('should timeout and remove the client', (done) => { + const pool = new Pool({ idleTimeoutMillis: 10 }) + pool.query('SELECT NOW()') + pool.on('remove', () => { + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + done() + }) + }) + + it( + 'times out and removes clients when others are also removed', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 10 }) + const clientA = yield pool.connect() + const clientB = yield pool.connect() + clientA.release() + clientB.release(new Error()) + + const removal = new Promise((resolve) => { + pool.on('remove', () => { + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + resolve() + }) + }) + + const timeout = wait(100).then(() => Promise.reject(new Error('Idle timeout failed to occur'))) + + try { + yield Promise.race([removal, timeout]) + } finally { + pool.end() + } + }) + ) + + it( + 'can remove idle clients and recreate them', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 1 }) 
+ const results = [] + for (var i = 0; i < 20; i++) { + let query = pool.query('SELECT NOW()') + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + results.push(yield query) + yield wait(2) + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + } + expect(results).to.have.length(20) + }) + ) + + it( + 'does not time out clients which are used', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 1 }) + const results = [] + for (var i = 0; i < 20; i++) { + let client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.idleCount).to.equal(0) + yield wait(10) + results.push(yield client.query('SELECT NOW()')) + client.release() + expect(pool.idleCount).to.equal(1) + expect(pool.totalCount).to.equal(1) + } + expect(results).to.have.length(20) + return pool.end() + }) + ) + + it('unrefs the connections and timeouts so the program can exit when idle when the allowExitOnIdle option is set', function (done) { + const child = fork(path.join(__dirname, 'idle-timeout-exit.js'), [], { + silent: true, + env: { ...process.env, ALLOW_EXIT_ON_IDLE: '1' }, + }) + let result = '' + child.stdout.setEncoding('utf8') + child.stdout.on('data', (chunk) => (result += chunk)) + child.on('error', (err) => done(err)) + child.on('close', () => { + expect(result).to.equal('completed first\ncompleted second\n') + done() + }) + }) + + it('keeps old behavior when allowExitOnIdle option is not set', function (done) { + const child = fork(path.join(__dirname, 'idle-timeout-exit.js'), [], { + silent: true, + }) + let result = '' + child.stdout.setEncoding('utf8') + child.stdout.on('data', (chunk) => (result += chunk)) + child.on('error', (err) => done(err)) + child.on('close', () => { + expect(result).to.equal('completed first\ncompleted second\nremoved\n') + done() + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/index.js b/reverse_engineering/node_modules/pg-pool/test/index.js new file 
mode 100644 index 0000000..57a68e0 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/index.js @@ -0,0 +1,226 @@ +'use strict' +const expect = require('expect.js') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool', function () { + describe('with callbacks', function () { + it('works totally unconfigured', function (done) { + const pool = new Pool() + pool.connect(function (err, client, release) { + if (err) return done(err) + client.query('SELECT NOW()', function (err, res) { + release() + if (err) return done(err) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + + it('passes props to clients', function (done) { + const pool = new Pool({ binary: true }) + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + expect(client.binary).to.eql(true) + pool.end(done) + }) + }) + + it('can run a query with a callback without parameters', function (done) { + const pool = new Pool() + pool.query('SELECT 1 as num', function (err, res) { + expect(res.rows[0]).to.eql({ num: 1 }) + pool.end(function () { + done(err) + }) + }) + }) + + it('can run a query with a callback', function (done) { + const pool = new Pool() + pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + expect(res.rows[0]).to.eql({ name: 'brianc' }) + pool.end(function () { + done(err) + }) + }) + }) + + it('passes connection errors to callback', function (done) { + const pool = new Pool({ port: 53922 }) + pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + expect(res).to.be(undefined) + expect(err).to.be.an(Error) + // a connection error should not polute the pool with a dead client + expect(pool.totalCount).to.equal(0) + pool.end(function (err) { + done(err) + }) + }) + }) + + it('does not pass client to error callback', function (done) { + const pool = new Pool({ port: 58242 }) + 
pool.connect(function (err, client, release) { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + expect(release).to.be.a(Function) + pool.end(done) + }) + }) + + it('removes client if it errors in background', function (done) { + const pool = new Pool() + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + client.testString = 'foo' + setTimeout(function () { + client.emit('error', new Error('on purpose')) + }, 10) + }) + pool.on('error', function (err) { + expect(err.message).to.be('on purpose') + expect(err.client).to.not.be(undefined) + expect(err.client.testString).to.be('foo') + err.client.connection.stream.on('end', function () { + pool.end(done) + }) + }) + }) + + it('should not change given options', function (done) { + const options = { max: 10 } + const pool = new Pool(options) + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + expect(options).to.eql({ max: 10 }) + pool.end(done) + }) + }) + + it('does not create promises when connecting', function (done) { + const pool = new Pool() + const returnValue = pool.connect(function (err, client, release) { + release() + if (err) return done(err) + pool.end(done) + }) + expect(returnValue).to.be(undefined) + }) + + it('does not create promises when querying', function (done) { + const pool = new Pool() + const returnValue = pool.query('SELECT 1 as num', function (err) { + pool.end(function () { + done(err) + }) + }) + expect(returnValue).to.be(undefined) + }) + + it('does not create promises when ending', function (done) { + const pool = new Pool() + const returnValue = pool.end(done) + expect(returnValue).to.be(undefined) + }) + + it('never calls callback syncronously', function (done) { + const pool = new Pool() + pool.connect((err, client) => { + if (err) throw err + client.release() + setImmediate(() => { + let called = false + pool.connect((err, client) => { + if (err) throw err + called = true + client.release() + 
setImmediate(() => { + pool.end(done) + }) + }) + expect(called).to.equal(false) + }) + }) + }) + }) + + describe('with promises', function () { + it('connects, queries, and disconnects', function () { + const pool = new Pool() + return pool.connect().then(function (client) { + return client.query('select $1::text as name', ['hi']).then(function (res) { + expect(res.rows).to.eql([{ name: 'hi' }]) + client.release() + return pool.end() + }) + }) + }) + + it('executes a query directly', () => { + const pool = new Pool() + return pool.query('SELECT $1::text as name', ['hi']).then((res) => { + expect(res.rows).to.have.length(1) + expect(res.rows[0].name).to.equal('hi') + return pool.end() + }) + }) + + it('properly pools clients', function () { + const pool = new Pool({ poolSize: 9 }) + const promises = _.times(30, function () { + return pool.connect().then(function (client) { + return client.query('select $1::text as name', ['hi']).then(function (res) { + client.release() + return res + }) + }) + }) + return Promise.all(promises).then(function (res) { + expect(res).to.have.length(30) + expect(pool.totalCount).to.be(9) + return pool.end() + }) + }) + + it('supports just running queries', function () { + const pool = new Pool({ poolSize: 9 }) + const text = 'select $1::text as name' + const values = ['hi'] + const query = { text: text, values: values } + const promises = _.times(30, () => pool.query(query)) + return Promise.all(promises).then(function (queries) { + expect(queries).to.have.length(30) + return pool.end() + }) + }) + + it('recovers from query errors', function () { + const pool = new Pool() + + const errors = [] + const promises = _.times(30, () => { + return pool.query('SELECT asldkfjasldkf').catch(function (e) { + errors.push(e) + }) + }) + return Promise.all(promises).then(() => { + expect(errors).to.have.length(30) + expect(pool.totalCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + return pool.query('SELECT $1::text as name', 
['hi']).then(function (res) { + expect(res.rows).to.eql([{ name: 'hi' }]) + return pool.end() + }) + }) + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/logging.js b/reverse_engineering/node_modules/pg-pool/test/logging.js new file mode 100644 index 0000000..839603b --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/logging.js @@ -0,0 +1,20 @@ +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('logging', function () { + it('logs to supplied log function if given', function () { + const messages = [] + const log = function (msg) { + messages.push(msg) + } + const pool = new Pool({ log: log }) + return pool.query('SELECT NOW()').then(function () { + expect(messages.length).to.be.greaterThan(0) + return pool.end() + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/max-uses.js b/reverse_engineering/node_modules/pg-pool/test/max-uses.js new file mode 100644 index 0000000..c94ddec --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/max-uses.js @@ -0,0 +1,98 @@ +const expect = require('expect.js') +const co = require('co') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('maxUses', () => { + it( + 'can create a single client and use it once', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const res = yield client.query('SELECT $1::text as name', ['hi']) + expect(res.rows[0].name).to.equal('hi') + client.release() + pool.end() + }) + ) + + it( + 'getting a connection a second time returns the same connection and releasing it also closes it', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + 
client.release() + const client2 = yield pool.connect() + expect(client).to.equal(client2) + expect(client2._ending).to.equal(false) + client2.release() + expect(client2._ending).to.equal(true) + return yield pool.end() + }) + ) + + it( + 'getting a connection a third time returns a new connection', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + client.release() + const client2 = yield pool.connect() + expect(client).to.equal(client2) + client2.release() + const client3 = yield pool.connect() + expect(client3).not.to.equal(client2) + client3.release() + return yield pool.end() + }) + ) + + it( + 'getting a connection from a pending request gets a fresh client when the released candidate is expended', + co.wrap(function* () { + const pool = new Pool({ max: 1, maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client1 = yield pool.connect() + pool.connect().then((client2) => { + expect(client2).to.equal(client1) + expect(pool.waitingCount).to.equal(1) + // Releasing the client this time should also expend it since maxUses is 2, causing client3 to be a fresh client + client2.release() + }) + const client3Promise = pool.connect().then((client3) => { + // client3 should be a fresh client since client2's release caused the first client to be expended + expect(pool.waitingCount).to.equal(0) + expect(client3).not.to.equal(client1) + return client3.release() + }) + // There should be two pending requests since we have 3 connect requests but a max size of 1 + expect(pool.waitingCount).to.equal(2) + // Releasing the client should not yet expend it since maxUses is 2 + client1.release() + yield client3Promise + return yield pool.end() + }) + ) + + it( + 'logs when removing an expended client', + co.wrap(function* () { + const messages = [] + const log = function (msg) { + messages.push(msg) + } + const pool = new Pool({ maxUses: 1, log }) + const client = yield 
pool.connect() + client.release() + expect(messages).to.contain('remove expended client') + return yield pool.end() + }) + ) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/releasing-clients.js b/reverse_engineering/node_modules/pg-pool/test/releasing-clients.js new file mode 100644 index 0000000..da8e09c --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/releasing-clients.js @@ -0,0 +1,54 @@ +const Pool = require('../') + +const expect = require('expect.js') +const net = require('net') + +describe('releasing clients', () => { + it('removes a client which cannot be queried', async () => { + // make a pool w/ only 1 client + const pool = new Pool({ max: 1 }) + expect(pool.totalCount).to.eql(0) + const client = await pool.connect() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + // reach into the client and sever its connection + client.connection.end() + + // wait for the client to error out + const err = await new Promise((resolve) => client.once('error', resolve)) + expect(err).to.be.ok() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + + // try to return it to the pool - this removes it because its broken + client.release() + expect(pool.totalCount).to.eql(0) + expect(pool.idleCount).to.eql(0) + + // make sure pool still works + const { rows } = await pool.query('SELECT NOW()') + expect(rows).to.have.length(1) + await pool.end() + }) + + it('removes a client which is ending', async () => { + // make a pool w/ only 1 client + const pool = new Pool({ max: 1 }) + expect(pool.totalCount).to.eql(0) + const client = await pool.connect() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + // end the client gracefully (but you shouldn't do this with pooled clients) + client.end() + + // try to return it to the pool + client.release() + expect(pool.totalCount).to.eql(0) + expect(pool.idleCount).to.eql(0) + + // make sure pool still works + const { rows } = await 
pool.query('SELECT NOW()') + expect(rows).to.have.length(1) + await pool.end() + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/setup.js b/reverse_engineering/node_modules/pg-pool/test/setup.js new file mode 100644 index 0000000..811e956 --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/setup.js @@ -0,0 +1,10 @@ +const crash = (reason) => { + process.on(reason, (err) => { + console.error(reason, err.stack) + process.exit(-1) + }) +} + +crash('unhandledRejection') +crash('uncaughtError') +crash('warning') diff --git a/reverse_engineering/node_modules/pg-pool/test/sizing.js b/reverse_engineering/node_modules/pg-pool/test/sizing.js new file mode 100644 index 0000000..e7863ba --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/sizing.js @@ -0,0 +1,58 @@ +const expect = require('expect.js') +const co = require('co') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool size of 1', () => { + it( + 'can create a single client and use it once', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const res = yield client.query('SELECT $1::text as name', ['hi']) + expect(res.rows[0].name).to.equal('hi') + client.release() + pool.end() + }) + ) + + it( + 'can create a single client and use it multiple times', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const wait = pool.connect() + expect(pool.waitingCount).to.equal(1) + client.release() + const client2 = yield wait + expect(client).to.equal(client2) + client2.release() + return yield pool.end() + }) + ) + + it( + 'can only send 1 query at a time', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + + // the query text column name changed in PostgreSQL 9.2 + const versionResult = 
yield pool.query('SHOW server_version_num') + const version = parseInt(versionResult.rows[0].server_version_num, 10) + const queryColumn = version < 90200 ? 'current_query' : 'query' + + const queryText = 'SELECT COUNT(*) as counts FROM pg_stat_activity WHERE ' + queryColumn + ' = $1' + const queries = _.times(20, () => pool.query(queryText, [queryText])) + const results = yield Promise.all(queries) + const counts = results.map((res) => parseInt(res.rows[0].counts, 10)) + expect(counts).to.eql(_.times(20, (i) => 1)) + return yield pool.end() + }) + ) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/submittable.js b/reverse_engineering/node_modules/pg-pool/test/submittable.js new file mode 100644 index 0000000..7a1574d --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/submittable.js @@ -0,0 +1,19 @@ +'use strict' +const Cursor = require('pg-cursor') +const expect = require('expect.js') +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('submittle', () => { + it('is returned from the query method', false, (done) => { + const pool = new Pool() + const cursor = pool.query(new Cursor('SELECT * from generate_series(0, 1000)')) + cursor.read((err, rows) => { + expect(err).to.be(undefined) + expect(!!rows).to.be.ok() + cursor.close(done) + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-pool/test/timeout.js b/reverse_engineering/node_modules/pg-pool/test/timeout.js new file mode 100644 index 0000000..e69de29 diff --git a/reverse_engineering/node_modules/pg-pool/test/verify.js b/reverse_engineering/node_modules/pg-pool/test/verify.js new file mode 100644 index 0000000..9331e1a --- /dev/null +++ b/reverse_engineering/node_modules/pg-pool/test/verify.js @@ -0,0 +1,24 @@ +'use strict' +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('verify', () => { + 
it('verifies a client with a callback', (done) => { + const pool = new Pool({ + verify: (client, cb) => { + cb(new Error('nope')) + }, + }) + + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(err.message).to.be('nope') + pool.end() + done() + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-protocol/LICENSE b/reverse_engineering/node_modules/pg-protocol/LICENSE new file mode 100644 index 0000000..5c14056 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - 2021 Brian Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/reverse_engineering/node_modules/pg-protocol/README.md b/reverse_engineering/node_modules/pg-protocol/README.md new file mode 100644 index 0000000..8c52e40 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/README.md @@ -0,0 +1,3 @@ +# pg-protocol + +Low level postgres wire protocol parser and serializer written in Typescript. 
Used by node-postgres. Needs more documentation. :smile: diff --git a/reverse_engineering/node_modules/pg-protocol/dist/b.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/b.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/b.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/reverse_engineering/node_modules/pg-protocol/dist/b.js b/reverse_engineering/node_modules/pg-protocol/dist/b.js new file mode 100644 index 0000000..5f5efb8 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/b.js @@ -0,0 +1,25 @@ +"use strict"; +// file for microbenchmarking +Object.defineProperty(exports, "__esModule", { value: true }); +const buffer_writer_1 = require("./buffer-writer"); +const buffer_reader_1 = require("./buffer-reader"); +const LOOPS = 1000; +let count = 0; +let start = Date.now(); +const writer = new buffer_writer_1.Writer(); +const reader = new buffer_reader_1.BufferReader(); +const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]); +const run = () => { + if (count > LOOPS) { + console.log(Date.now() - start); + return; + } + count++; + for (let i = 0; i < LOOPS; i++) { + reader.setBuffer(0, buffer); + reader.cstring(); + } + setImmediate(run); +}; +run(); +//# sourceMappingURL=b.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/b.js.map b/reverse_engineering/node_modules/pg-protocol/dist/b.js.map new file mode 100644 index 0000000..cddd15e --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/b.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"b.js","sourceRoot":"","sources":["../src/b.ts"],"names":[],"mappings":";AAAA,6BAA6B;;AAE7B,mDAAwC;AAExC,mDAA8C;AAE9C,MAAM,KAAK,GAAG,IAAI,CAAA;AAClB,IAAI,KAAK,GAAG,CAAC,CAAA;AACb,IAAI,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;AACtB,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,MAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;AACjC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;AAE3D,MAAM,GAAG,GAAG,GAAG,EAAE;IACf,IAAI,KAAK,GAAG,KAAK,EAAE;QACjB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,CAAA;QAC/B,OAAM;KACP;IACD,KAAK,EAAE,CAAA;IACP,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC9B,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,EAAE,CAAA;KACjB;IACD,YAAY,CAAC,GAAG,CAAC,CAAA;AACnB,CAAC,CAAA;AAED,GAAG,EAAE,CAAA"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.d.ts new file mode 100644 index 0000000..8970d77 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.d.ts @@ -0,0 +1,14 @@ +/// +export declare class BufferReader { + private offset; + private buffer; + private encoding; + constructor(offset?: number); + setBuffer(offset: number, buffer: Buffer): void; + int16(): number; + byte(): number; + int32(): number; + string(length: number): string; + cstring(): string; + bytes(length: number): Buffer; +} diff --git a/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js b/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js new file mode 100644 index 0000000..ef633b1 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BufferReader = void 0; +const emptyBuffer = Buffer.allocUnsafe(0); +class BufferReader { + 
constructor(offset = 0) { + this.offset = offset; + this.buffer = emptyBuffer; + // TODO(bmc): support non-utf8 encoding? + this.encoding = 'utf-8'; + } + setBuffer(offset, buffer) { + this.offset = offset; + this.buffer = buffer; + } + int16() { + const result = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return result; + } + byte() { + const result = this.buffer[this.offset]; + this.offset++; + return result; + } + int32() { + const result = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return result; + } + string(length) { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length); + this.offset += length; + return result; + } + cstring() { + const start = this.offset; + let end = start; + while (this.buffer[end++] !== 0) { } + this.offset = end; + return this.buffer.toString(this.encoding, start, end - 1); + } + bytes(length) { + const result = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return result; + } +} +exports.BufferReader = BufferReader; +//# sourceMappingURL=buffer-reader.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js.map b/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js.map new file mode 100644 index 0000000..04d5b1d --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/buffer-reader.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"buffer-reader.js","sourceRoot":"","sources":["../src/buffer-reader.ts"],"names":[],"mappings":";;;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAEzC,MAAa,YAAY;IAMvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAL9B,WAAM,GAAW,WAAW,CAAA;QAEpC,wCAAwC;QAChC,aAAQ,GAAW,OAAO,CAAA;IAEO,CAAC;IAEnC,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACvC,IAAI,CAAC,MAAM,EAAE,CAAA;QACb,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACrF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,OAAO;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,GAAG,GAAG,KAAK,CAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,EAAE,GAAE;QACnC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;QACjB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;IAC5D,CAAC;IAEM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlDD,oCAkDC"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.d.ts new file mode 100644 index 0000000..4ac41e6 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.d.ts @@ -0,0 +1,16 @@ +/// +export declare class Writer { + private size; + private buffer; + private 
offset; + private headerPosition; + constructor(size?: number); + private ensure; + addInt32(num: number): Writer; + addInt16(num: number): Writer; + addCString(string: string): Writer; + addString(string?: string): Writer; + add(otherBuffer: Buffer): Writer; + private join; + flush(code?: number): Buffer; +} diff --git a/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js b/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js new file mode 100644 index 0000000..16fd616 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js @@ -0,0 +1,81 @@ +"use strict"; +//binary data writer tuned for encoding binary specific to the postgres binary protocol +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Writer = void 0; +class Writer { + constructor(size = 256) { + this.size = size; + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.allocUnsafe(size); + } + ensure(size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.allocUnsafe(newSize); + oldBuffer.copy(this.buffer); + } + } + addInt32(num) { + this.ensure(4); + this.buffer[this.offset++] = (num >>> 24) & 0xff; + this.buffer[this.offset++] = (num >>> 16) & 0xff; + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addInt16(num) { + this.ensure(2); + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addCString(string) { + if (!string) { + this.ensure(1); + } + else { + var len = Buffer.byteLength(string); + this.ensure(len + 1); // +1 for null terminator + this.buffer.write(string, this.offset, 'utf-8'); + this.offset += 
len; + } + this.buffer[this.offset++] = 0; // null terminator + return this; + } + addString(string = '') { + var len = Buffer.byteLength(string); + this.ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; + } + add(otherBuffer) { + this.ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; + } + join(code) { + if (code) { + this.buffer[this.headerPosition] = code; + //length is everything in this packet minus the code + const length = this.offset - (this.headerPosition + 1); + this.buffer.writeInt32BE(length, this.headerPosition + 1); + } + return this.buffer.slice(code ? 0 : 5, this.offset); + } + flush(code) { + var result = this.join(code); + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.allocUnsafe(this.size); + return result; + } +} +exports.Writer = Writer; +//# sourceMappingURL=buffer-writer.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js.map b/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js.map new file mode 100644 index 0000000..fc6e650 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/buffer-writer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"buffer-writer.js","sourceRoot":"","sources":["../src/buffer-writer.ts"],"names":[],"mappings":";AAAA,uFAAuF;;;AAEvF,MAAa,MAAM;IAIjB,YAAoB,OAAO,GAAG;QAAV,SAAI,GAAJ,IAAI,CAAM;QAFtB,WAAM,GAAW,CAAC,CAAA;QAClB,mBAAc,GAAW,CAAC,CAAA;QAEhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAA;IACxC,CAAC;IAEO,MAAM,CAAC,IAAY;QACzB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAChD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;YAC3B,4CAA4C;YAC5C,qEAAqE;YACrE,IAAI,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAA;YAC/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;YACzC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;SAC5B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACf;aAAM;YACL,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;YACnC,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,CAAC,yBAAyB;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;YAC/C,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;SACnB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA,CAAC,kBAAkB;QACjD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MA
AM,CAAC,CAAA;QACnC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;QAClB,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAA;QACjC,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,IAAI,CAAC,IAAa;QACxB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,IAAI,CAAA;YACvC,oDAAoD;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;YACtD,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;SAC1D;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;IACrD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACf,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC3C,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlFD,wBAkFC"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js b/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js new file mode 100644 index 0000000..3423c96 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js @@ -0,0 +1,511 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const test_buffers_1 = __importDefault(require("./testing/test-buffers")); +const buffer_list_1 = __importDefault(require("./testing/buffer-list")); +const _1 = require("."); +const assert_1 = __importDefault(require("assert")); +const stream_1 = require("stream"); +var authOkBuffer = test_buffers_1.default.authenticationOk(); +var paramStatusBuffer = test_buffers_1.default.parameterStatus('client_encoding', 'UTF8'); +var readyForQueryBuffer = test_buffers_1.default.readyForQuery(); +var backendKeyDataBuffer = test_buffers_1.default.backendKeyData(1, 2); +var commandCompleteBuffer = test_buffers_1.default.commandComplete('SELECT 3'); +var parseCompleteBuffer = test_buffers_1.default.parseComplete(); +var bindCompleteBuffer = test_buffers_1.default.bindComplete(); +var portalSuspendedBuffer = test_buffers_1.default.portalSuspended(); +var addRow = function (bufferList, name, offset) { + return bufferList + .addCString(name) // field name + .addInt32(offset++) // table id + .addInt16(offset++) // attribute of column number + .addInt32(offset++) // objectId of field's data type + .addInt16(offset++) // datatype size + .addInt32(offset++) // type modifier + .addInt16(0); // format code, 0 => text +}; +var row1 = { + name: 'id', + tableID: 1, + attributeNumber: 2, + 
dataTypeID: 3, + dataTypeSize: 4, + typeModifier: 5, + formatCode: 0, +}; +var oneRowDescBuff = test_buffers_1.default.rowDescription([row1]); +row1.name = 'bang'; +var twoRowBuf = test_buffers_1.default.rowDescription([ + row1, + { + name: 'whoah', + tableID: 10, + attributeNumber: 11, + dataTypeID: 12, + dataTypeSize: 13, + typeModifier: 14, + formatCode: 0, + }, +]); +var emptyRowFieldBuf = new buffer_list_1.default().addInt16(0).join(true, 'D'); +var emptyRowFieldBuf = test_buffers_1.default.dataRow([]); +var oneFieldBuf = new buffer_list_1.default() + .addInt16(1) // number of fields + .addInt32(5) // length of bytes of fields + .addCString('test') + .join(true, 'D'); +var oneFieldBuf = test_buffers_1.default.dataRow(['test']); +var expectedAuthenticationOkayMessage = { + name: 'authenticationOk', + length: 8, +}; +var expectedParameterStatusMessage = { + name: 'parameterStatus', + parameterName: 'client_encoding', + parameterValue: 'UTF8', + length: 25, +}; +var expectedBackendKeyDataMessage = { + name: 'backendKeyData', + processID: 1, + secretKey: 2, +}; +var expectedReadyForQueryMessage = { + name: 'readyForQuery', + length: 5, + status: 'I', +}; +var expectedCommandCompleteMessage = { + name: 'commandComplete', + length: 13, + text: 'SELECT 3', +}; +var emptyRowDescriptionBuffer = new buffer_list_1.default() + .addInt16(0) // number of fields + .join(true, 'T'); +var expectedEmptyRowDescriptionMessage = { + name: 'rowDescription', + length: 6, + fieldCount: 0, + fields: [], +}; +var expectedOneRowMessage = { + name: 'rowDescription', + length: 27, + fieldCount: 1, + fields: [ + { + name: 'id', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + ], +}; +var expectedTwoRowMessage = { + name: 'rowDescription', + length: 53, + fieldCount: 2, + fields: [ + { + name: 'bang', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + { + name: 
'whoah', + tableID: 10, + columnID: 11, + dataTypeID: 12, + dataTypeSize: 13, + dataTypeModifier: 14, + format: 'text', + }, + ], +}; +var emptyParameterDescriptionBuffer = new buffer_list_1.default() + .addInt16(0) // number of parameters + .join(true, 't'); +var oneParameterDescBuf = test_buffers_1.default.parameterDescription([1111]); +var twoParameterDescBuf = test_buffers_1.default.parameterDescription([2222, 3333]); +var expectedEmptyParameterDescriptionMessage = { + name: 'parameterDescription', + length: 6, + parameterCount: 0, + dataTypeIDs: [], +}; +var expectedOneParameterMessage = { + name: 'parameterDescription', + length: 10, + parameterCount: 1, + dataTypeIDs: [1111], +}; +var expectedTwoParameterMessage = { + name: 'parameterDescription', + length: 14, + parameterCount: 2, + dataTypeIDs: [2222, 3333], +}; +var testForMessage = function (buffer, expectedMessage) { + it('recieves and parses ' + expectedMessage.name, () => __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([buffer]); + const [lastMessage] = messages; + for (const key in expectedMessage) { + assert_1.default.deepEqual(lastMessage[key], expectedMessage[key]); + } + })); +}; +var plainPasswordBuffer = test_buffers_1.default.authenticationCleartextPassword(); +var md5PasswordBuffer = test_buffers_1.default.authenticationMD5Password(); +var SASLBuffer = test_buffers_1.default.authenticationSASL(); +var SASLContinueBuffer = test_buffers_1.default.authenticationSASLContinue(); +var SASLFinalBuffer = test_buffers_1.default.authenticationSASLFinal(); +var expectedPlainPasswordMessage = { + name: 'authenticationCleartextPassword', +}; +var expectedMD5PasswordMessage = { + name: 'authenticationMD5Password', + salt: Buffer.from([1, 2, 3, 4]), +}; +var expectedSASLMessage = { + name: 'authenticationSASL', + mechanisms: ['SCRAM-SHA-256'], +}; +var expectedSASLContinueMessage = { + name: 'authenticationSASLContinue', + data: 'data', +}; +var expectedSASLFinalMessage 
= { + name: 'authenticationSASLFinal', + data: 'data', +}; +var notificationResponseBuffer = test_buffers_1.default.notification(4, 'hi', 'boom'); +var expectedNotificationResponseMessage = { + name: 'notification', + processId: 4, + channel: 'hi', + payload: 'boom', +}; +const parseBuffers = (buffers) => __awaiter(void 0, void 0, void 0, function* () { + const stream = new stream_1.PassThrough(); + for (const buffer of buffers) { + stream.write(buffer); + } + stream.end(); + const msgs = []; + yield _1.parse(stream, (msg) => msgs.push(msg)); + return msgs; +}); +describe('PgPacketStream', function () { + testForMessage(authOkBuffer, expectedAuthenticationOkayMessage); + testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage); + testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage); + testForMessage(SASLBuffer, expectedSASLMessage); + testForMessage(SASLContinueBuffer, expectedSASLContinueMessage); + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]); + testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage); + testForMessage(SASLFinalBuffer, expectedSASLFinalMessage); + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]); + testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage); + testForMessage(paramStatusBuffer, expectedParameterStatusMessage); + testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage); + testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage); + 
testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage); + testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage); + testForMessage(test_buffers_1.default.emptyQuery(), { + name: 'emptyQuery', + length: 4, + }); + testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), { + name: 'noData', + }); + describe('rowDescription messages', function () { + testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage); + testForMessage(oneRowDescBuff, expectedOneRowMessage); + testForMessage(twoRowBuf, expectedTwoRowMessage); + }); + describe('parameterDescription messages', function () { + testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage); + testForMessage(oneParameterDescBuf, expectedOneParameterMessage); + testForMessage(twoParameterDescBuf, expectedTwoParameterMessage); + }); + describe('parsing rows', function () { + describe('parsing empty row', function () { + testForMessage(emptyRowFieldBuf, { + name: 'dataRow', + fieldCount: 0, + }); + }); + describe('parsing data row with fields', function () { + testForMessage(oneFieldBuf, { + name: 'dataRow', + fieldCount: 1, + fields: ['test'], + }); + }); + }); + describe('notice message', function () { + // this uses the same logic as error message + var buff = test_buffers_1.default.notice([{ type: 'C', value: 'code' }]); + testForMessage(buff, { + name: 'notice', + code: 'code', + }); + }); + testForMessage(test_buffers_1.default.error([]), { + name: 'error', + }); + describe('with all the fields', function () { + var buffer = test_buffers_1.default.error([ + { + type: 'S', + value: 'ERROR', + }, + { + type: 'C', + value: 'code', + }, + { + type: 'M', + value: 'message', + }, + { + type: 'D', + value: 'details', + }, + { + type: 'H', + value: 'hint', + }, + { + type: 'P', + value: '100', + }, + { + type: 'p', + value: '101', + }, + { + type: 'q', + value: 'query', + }, + { + type: 'W', + value: 'where', + }, + { + type: 'F', + value: 
'file', + }, + { + type: 'L', + value: 'line', + }, + { + type: 'R', + value: 'routine', + }, + { + type: 'Z', + value: 'alsdkf', + }, + ]); + testForMessage(buffer, { + name: 'error', + severity: 'ERROR', + code: 'code', + message: 'message', + detail: 'details', + hint: 'hint', + position: '100', + internalPosition: '101', + internalQuery: 'query', + where: 'where', + file: 'file', + line: 'line', + routine: 'routine', + }); + }); + testForMessage(parseCompleteBuffer, { + name: 'parseComplete', + }); + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }); + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }); + testForMessage(test_buffers_1.default.closeComplete(), { + name: 'closeComplete', + }); + describe('parses portal suspended message', function () { + testForMessage(portalSuspendedBuffer, { + name: 'portalSuspended', + }); + }); + describe('parses replication start message', function () { + testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), { + name: 'replicationStart', + length: 4, + }); + }); + describe('copy', () => { + testForMessage(test_buffers_1.default.copyIn(0), { + name: 'copyInResponse', + length: 7, + binary: false, + columnTypes: [], + }); + testForMessage(test_buffers_1.default.copyIn(2), { + name: 'copyInResponse', + length: 11, + binary: false, + columnTypes: [0, 1], + }); + testForMessage(test_buffers_1.default.copyOut(0), { + name: 'copyOutResponse', + length: 7, + binary: false, + columnTypes: [], + }); + testForMessage(test_buffers_1.default.copyOut(3), { + name: 'copyOutResponse', + length: 13, + binary: false, + columnTypes: [0, 1, 2], + }); + testForMessage(test_buffers_1.default.copyDone(), { + name: 'copyDone', + length: 4, + }); + testForMessage(test_buffers_1.default.copyData(Buffer.from([5, 6, 7])), { + name: 'copyData', + length: 7, + chunk: Buffer.from([5, 6, 7]), + }); + }); + // since the data message on a stream can randomly divide the incomming + // tcp packets anywhere, we need to make 
sure we can parse every single + // split on a tcp message + describe('split buffer, single message parsing', function () { + var fullBuffer = test_buffers_1.default.dataRow([null, 'bang', 'zug zug', null, '!']); + it('parses when full buffer comes in', function () { + return __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([fullBuffer]); + const message = messages[0]; + assert_1.default.equal(message.fields.length, 5); + assert_1.default.equal(message.fields[0], null); + assert_1.default.equal(message.fields[1], 'bang'); + assert_1.default.equal(message.fields[2], 'zug zug'); + assert_1.default.equal(message.fields[3], null); + assert_1.default.equal(message.fields[4], '!'); + }); + }); + var testMessageRecievedAfterSpiltAt = function (split) { + return __awaiter(this, void 0, void 0, function* () { + var firstBuffer = Buffer.alloc(fullBuffer.length - split); + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length); + fullBuffer.copy(firstBuffer, 0, 0); + fullBuffer.copy(secondBuffer, 0, firstBuffer.length); + const messages = yield parseBuffers([fullBuffer]); + const message = messages[0]; + assert_1.default.equal(message.fields.length, 5); + assert_1.default.equal(message.fields[0], null); + assert_1.default.equal(message.fields[1], 'bang'); + assert_1.default.equal(message.fields[2], 'zug zug'); + assert_1.default.equal(message.fields[3], null); + assert_1.default.equal(message.fields[4], '!'); + }); + }; + it('parses when split in the middle', function () { + testMessageRecievedAfterSpiltAt(6); + }); + it('parses when split at end', function () { + testMessageRecievedAfterSpiltAt(2); + }); + it('parses when split at beginning', function () { + testMessageRecievedAfterSpiltAt(fullBuffer.length - 2); + testMessageRecievedAfterSpiltAt(fullBuffer.length - 1); + testMessageRecievedAfterSpiltAt(fullBuffer.length - 5); + }); + }); + describe('split buffer, multiple message parsing', function () { + var 
dataRowBuffer = test_buffers_1.default.dataRow(['!']); + var readyForQueryBuffer = test_buffers_1.default.readyForQuery(); + var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length); + dataRowBuffer.copy(fullBuffer, 0, 0); + readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0); + var verifyMessages = function (messages) { + assert_1.default.strictEqual(messages.length, 2); + assert_1.default.deepEqual(messages[0], { + name: 'dataRow', + fieldCount: 1, + length: 11, + fields: ['!'], + }); + assert_1.default.equal(messages[0].fields[0], '!'); + assert_1.default.deepEqual(messages[1], { + name: 'readyForQuery', + length: 5, + status: 'I', + }); + }; + // sanity check + it('recieves both messages when packet is not split', function () { + return __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([fullBuffer]); + verifyMessages(messages); + }); + }); + var splitAndVerifyTwoMessages = function (split) { + return __awaiter(this, void 0, void 0, function* () { + var firstBuffer = Buffer.alloc(fullBuffer.length - split); + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length); + fullBuffer.copy(firstBuffer, 0, 0); + fullBuffer.copy(secondBuffer, 0, firstBuffer.length); + const messages = yield parseBuffers([firstBuffer, secondBuffer]); + verifyMessages(messages); + }); + }; + describe('recieves both messages when packet is split', function () { + it('in the middle', function () { + return splitAndVerifyTwoMessages(11); + }); + it('at the front', function () { + return Promise.all([ + splitAndVerifyTwoMessages(fullBuffer.length - 1), + splitAndVerifyTwoMessages(fullBuffer.length - 4), + splitAndVerifyTwoMessages(fullBuffer.length - 6), + ]); + }); + it('at the end', function () { + return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]); + }); + }); + }); +}); +//# sourceMappingURL=inbound-parser.test.js.map \ No newline at end of file diff --git 
a/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js.map b/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js.map new file mode 100644 index 0000000..b026ac4 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/inbound-parser.test.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inbound-parser.test.js","sourceRoot":"","sources":["../src/inbound-parser.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,0EAA4C;AAC5C,wEAA8C;AAC9C,wBAAyB;AACzB,oDAA2B;AAC3B,mCAAoC;AAGpC,IAAI,YAAY,GAAG,sBAAO,CAAC,gBAAgB,EAAE,CAAA;AAC7C,IAAI,iBAAiB,GAAG,sBAAO,CAAC,eAAe,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;AAC1E,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACjD,IAAI,oBAAoB,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;AACvD,IAAI,qBAAqB,GAAG,sBAAO,CAAC,eAAe,CAAC,UAAU,CAAC,CAAA;AAC/D,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACjD,IAAI,kBAAkB,GAAG,sBAAO,CAAC,YAAY,EAAE,CAAA;AAC/C,IAAI,qBAAqB,GAAG,sBAAO,CAAC,eAAe,EAAE,CAAA;AAErD,IAAI,MAAM,GAAG,UAAU,UAAsB,EAAE,IAAY,EAAE,MAAc;IACzE,OAAO,UAAU;SACd,UAAU,CAAC,IAAI,CAAC,CAAC,aAAa;SAC9B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,WAAW;SAC9B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,6BAA6B;SAChD,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gCAAgC;SACnD,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gBAAgB;SACnC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gBAAgB;SACnC,QAAQ,CAAC,CAAC,CAAC,CAAA,CAAC,yBAAyB;AAC1C,CAAC,CAAA;AAED,IAAI,IAAI,GAAG;IACT,IAAI,EAAE,IAAI;IACV,OAAO,EAAE,CAAC;IACV,eAAe,EAAE,CAAC;IAClB,UAAU,EAAE,CAAC;IACb,YAAY,EAAE,CAAC;IACf,YAAY,EAAE,CAAC;IACf,UAAU,EAAE,CAAC;CACd,CAAA;AACD,IAAI,cAAc,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;AACnD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAA;AAElB,IAAI,SAAS,GAAG,sBAAO,CAAC,cAAc,CAAC;IACrC,IAAI;IACJ;QACE,IAAI,EAAE,OAAO;QACb,OAAO,EAAE,EAAE;QACX,eAAe,EAAE,EAAE;QACnB,UAAU,EAAE,EAAE;QACd,YAAY,EAAE,EAAE;QAChB,YAAY,EAAE,EAAE;QAChB,UAAU,EAAE,CAAC;KACd;CACF,CAAC,CAAA;AAEF,IAAI,gBAAgB,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAEnE,IAAI,gBAAgB,GAAG,sBAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAA;AAE1C
,IAAI,WAAW,GAAG,IAAI,qBAAU,EAAE;KAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,mBAAmB;KAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,4BAA4B;KACxC,UAAU,CAAC,MAAM,CAAC;KAClB,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,WAAW,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,CAAA;AAE3C,IAAI,iCAAiC,GAAG;IACtC,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAED,IAAI,8BAA8B,GAAG;IACnC,IAAI,EAAE,iBAAiB;IACvB,aAAa,EAAE,iBAAiB;IAChC,cAAc,EAAE,MAAM;IACtB,MAAM,EAAE,EAAE;CACX,CAAA;AAED,IAAI,6BAA6B,GAAG;IAClC,IAAI,EAAE,gBAAgB;IACtB,SAAS,EAAE,CAAC;IACZ,SAAS,EAAE,CAAC;CACb,CAAA;AAED,IAAI,4BAA4B,GAAG;IACjC,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;IACT,MAAM,EAAE,GAAG;CACZ,CAAA;AAED,IAAI,8BAA8B,GAAG;IACnC,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,EAAE;IACV,IAAI,EAAE,UAAU;CACjB,CAAA;AACD,IAAI,yBAAyB,GAAG,IAAI,qBAAU,EAAE;KAC7C,QAAQ,CAAC,CAAC,CAAC,CAAC,mBAAmB;KAC/B,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,kCAAkC,GAAG;IACvC,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,CAAC;IACT,UAAU,EAAE,CAAC;IACb,MAAM,EAAE,EAAE;CACX,CAAA;AACD,IAAI,qBAAqB,GAAG;IAC1B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,IAAI;YACV,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,IAAI,qBAAqB,GAAG;IAC1B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;QACD;YACE,IAAI,EAAE,OAAO;YACb,OAAO,EAAE,EAAE;YACX,QAAQ,EAAE,EAAE;YACZ,UAAU,EAAE,EAAE;YACd,YAAY,EAAE,EAAE;YAChB,gBAAgB,EAAE,EAAE;YACpB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,IAAI,+BAA+B,GAAG,IAAI,qBAAU,EAAE;KACnD,QAAQ,CAAC,CAAC,CAAC,CAAC,uBAAuB;KACnC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,mBAAmB,GAAG,sBAAO,CAAC,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;AAE9D,IAAI,mBAAmB,GAAG,sBAAO,CAAC,oBAAoB,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAEpE,IAAI,wCAAwC,GAAG;IAC7C,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,CAAC;IACT,cAAc,EA
AE,CAAC;IACjB,WAAW,EAAE,EAAE;CAChB,CAAA;AAED,IAAI,2BAA2B,GAAG;IAChC,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,EAAE;IACV,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,CAAC,IAAI,CAAC;CACpB,CAAA;AAED,IAAI,2BAA2B,GAAG;IAChC,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,EAAE;IACV,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC;CAC1B,CAAA;AAED,IAAI,cAAc,GAAG,UAAU,MAAc,EAAE,eAAoB;IACjE,EAAE,CAAC,sBAAsB,GAAG,eAAe,CAAC,IAAI,EAAE,GAAS,EAAE;QAC3D,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,MAAM,CAAC,CAAC,CAAA;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAA;QAE9B,KAAK,MAAM,GAAG,IAAI,eAAe,EAAE;YACjC,gBAAM,CAAC,SAAS,CAAE,WAAmB,CAAC,GAAG,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,CAAC,CAAA;SAClE;IACH,CAAC,CAAA,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,IAAI,mBAAmB,GAAG,sBAAO,CAAC,+BAA+B,EAAE,CAAA;AACnE,IAAI,iBAAiB,GAAG,sBAAO,CAAC,yBAAyB,EAAE,CAAA;AAC3D,IAAI,UAAU,GAAG,sBAAO,CAAC,kBAAkB,EAAE,CAAA;AAC7C,IAAI,kBAAkB,GAAG,sBAAO,CAAC,0BAA0B,EAAE,CAAA;AAC7D,IAAI,eAAe,GAAG,sBAAO,CAAC,uBAAuB,EAAE,CAAA;AAEvD,IAAI,4BAA4B,GAAG;IACjC,IAAI,EAAE,iCAAiC;CACxC,CAAA;AAED,IAAI,0BAA0B,GAAG;IAC/B,IAAI,EAAE,2BAA2B;IACjC,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;CAChC,CAAA;AAED,IAAI,mBAAmB,GAAG;IACxB,IAAI,EAAE,oBAAoB;IAC1B,UAAU,EAAE,CAAC,eAAe,CAAC;CAC9B,CAAA;AAED,IAAI,2BAA2B,GAAG;IAChC,IAAI,EAAE,4BAA4B;IAClC,IAAI,EAAE,MAAM;CACb,CAAA;AAED,IAAI,wBAAwB,GAAG;IAC7B,IAAI,EAAE,yBAAyB;IAC/B,IAAI,EAAE,MAAM;CACb,CAAA;AAED,IAAI,0BAA0B,GAAG,sBAAO,CAAC,YAAY,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;AACtE,IAAI,mCAAmC,GAAG;IACxC,IAAI,EAAE,cAAc;IACpB,SAAS,EAAE,CAAC;IACZ,OAAO,EAAE,IAAI;IACb,OAAO,EAAE,MAAM;CAChB,CAAA;AAED,MAAM,YAAY,GAAG,CAAO,OAAiB,EAA6B,EAAE;IAC1E,MAAM,MAAM,GAAG,IAAI,oBAAW,EAAE,CAAA;IAChC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;QAC5B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;KACrB;IACD,MAAM,CAAC,GAAG,EAAE,CAAA;IACZ,MAAM,IAAI,GAAqB,EAAE,CAAA;IACjC,MAAM,QAAK,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;IAC5C,OAAO,IAAI,CAAA;AACb,CAAC,CAAA,CAAA;AAED,QAAQ,CAAC,gBAAgB,EAAE;IACzB,cAAc,CAAC,YAAY,EAAE,iCAAiC,CAAC,CAAA;IAC/D,cAAc,
CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,iBAAiB,EAAE,0BAA0B,CAAC,CAAA;IAC7D,cAAc,CAAC,UAAU,EAAE,mBAAmB,CAAC,CAAA;IAC/C,cAAc,CAAC,kBAAkB,EAAE,2BAA2B,CAAC,CAAA;IAE/D,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,0BAA0B,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,kBAAkB,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACjG,cAAc,CAAC,0BAA0B,EAAE,2BAA2B,CAAC,CAAA;IAEvE,cAAc,CAAC,eAAe,EAAE,wBAAwB,CAAC,CAAA;IAEzD,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,uBAAuB,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,eAAe,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC3F,cAAc,CAAC,uBAAuB,EAAE,wBAAwB,CAAC,CAAA;IAEjE,cAAc,CAAC,iBAAiB,EAAE,8BAA8B,CAAC,CAAA;IACjE,cAAc,CAAC,oBAAoB,EAAE,6BAA6B,CAAC,CAAA;IACnE,cAAc,CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,qBAAqB,EAAE,8BAA8B,CAAC,CAAA;IACrE,cAAc,CAAC,0BAA0B,EAAE,mCAAmC,CAAC,CAAA;IAC/E,cAAc,CAAC,sBAAO,CAAC,UAAU,EAAE,EAAE;QACnC,IAAI,EAAE,YAAY;QAClB,MAAM,EAAE,CAAC;KACV,CAAC,CAAA;IAEF,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QAC9C,IAAI,EAAE,QAAQ;KACf,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,cAAc,CAAC,yBAAyB,EAAE,kCAAkC,CAAC,CAAA;QAC7E,cAAc,CAAC,cAAc,EAAE,qBAAqB,CAAC,CAAA;QACrD,cAAc,CAAC,SAAS,EAAE,qBAAqB,CAAC,CAAA;IAClD,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,+BAA+B,EAAE;QACxC,cAAc,CAAC,+BAA+B,EAAE,wCAAwC,CAAC,CAAA;QACzF,cAAc,CAAC,mBAAmB,EAAE,2BAA2B,CAAC,CAAA;QAChE,cAAc,CAAC,mBAAmB,EAAE,2BAA2B,CAAC,CAAA;IAClE,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,cAAc,EAAE;QACvB,QAAQ,CAAC,mBAAmB,EAAE;YAC5B,cAAc,CAAC,gBAAgB,EAAE;gBAC/B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;aACd,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QAEF,QAAQ,CAAC,8BAA8B,EAAE;YACvC,cAAc,CAAC,WAAW,EAAE;gBAC1B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,CAAC,MAAM,CAAC;aACjB,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,gBAAgB,EAAE;QACzB,4CAA4C;QAC5C,IAAI,IAAI,GAAG,sBAAO,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC,CAAA;QACzD,cAAc,CAAC,IAAI,EA
AE;YACnB,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,MAAM;SACb,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;QAChC,IAAI,EAAE,OAAO;KACd,CAAC,CAAA;IAEF,QAAQ,CAAC,qBAAqB,EAAE;QAC9B,IAAI,MAAM,GAAG,sBAAO,CAAC,KAAK,CAAC;YACzB;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,QAAQ;aAChB;SACF,CAAC,CAAA;QAEF,cAAc,CAAC,MAAM,EAAE;YACrB,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,OAAO;YACjB,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;YAClB,MAAM,EAAE,SAAS;YACjB,IAAI,EAAE,MAAM;YACZ,QAAQ,EAAE,KAAK;YACf,gBAAgB,EAAE,KAAK;YACvB,aAAa,EAAE,OAAO;YACtB,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;SACnB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,mBAAmB,EAAE;QAClC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,aAAa,EAAE,EAAE;QACtC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,QAAQ,CAAC,iCAAiC,EAAE;QAC1C,cAAc,CAAC,qBAAqB,EAAE;YACpC,IAAI,EAAE,iBAAiB;SACxB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,kCAAkC,EAAE;QAC3C,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE;YAC1D,IAAI,EAAE,kBAAkB;YACxB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE;QACpB,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAA
K;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC;SACpB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;SACvB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,EAAE,EAAE;YACjC,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;YACvD,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;YACT,KAAK,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC9B,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,uEAAuE;IACvE,uEAAuE;IACvE,yBAAyB;IACzB,QAAQ,CAAC,sCAAsC,EAAE;QAC/C,IAAI,UAAU,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAEtE,EAAE,CAAC,kCAAkC,EAAE;;gBACrC,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAC,CAAA;QAEF,IAAI,+BAA+B,GAAG,UAAgB,KAAa;;gBACjE,IAAI,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBACzD,IAAI,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACvE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,
EAAE,WAAW,CAAC,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAA;QAED,EAAE,CAAC,iCAAiC,EAAE;YACpC,+BAA+B,CAAC,CAAC,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,+BAA+B,CAAC,CAAC,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,gCAAgC,EAAE;YACnC,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;QACxD,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wCAAwC,EAAE;QACjD,IAAI,aAAa,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;QAC1C,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;QACjD,IAAI,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,MAAM,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QAChF,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QACpC,mBAAmB,CAAC,IAAI,CAAC,UAAU,EAAE,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;QAE7D,IAAI,cAAc,GAAG,UAAU,QAAe;YAC5C,gBAAM,CAAC,WAAW,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;YACtC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,EAAE;gBACV,MAAM,EAAE,CAAC,GAAG,CAAC;aACd,CAAC,CAAA;YACF,gBAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACxC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,eAAe;gBACrB,MAAM,EAAE,CAAC;gBACT,MAAM,EAAE,GAAG;aACZ,CAAC,CAAA;QACJ,CAAC,CAAA;QACD,eAAe;QACf,EAAE,CAAC,iDAAiD,EAAE;;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,cAAc,CAAC,QAAQ,CAAC,CAA
A;YAC1B,CAAC;SAAA,CAAC,CAAA;QAEF,IAAI,yBAAyB,GAAG,UAAgB,KAAa;;gBAC3D,IAAI,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBACzD,IAAI,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACvE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,WAAW,CAAC,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,WAAW,EAAE,YAAY,CAAC,CAAC,CAAA;gBAChE,cAAc,CAAC,QAAQ,CAAC,CAAA;YAC1B,CAAC;SAAA,CAAA;QAED,QAAQ,CAAC,6CAA6C,EAAE;YACtD,EAAE,CAAC,eAAe,EAAE;gBAClB,OAAO,yBAAyB,CAAC,EAAE,CAAC,CAAA;YACtC,CAAC,CAAC,CAAA;YACF,EAAE,CAAC,cAAc,EAAE;gBACjB,OAAO,OAAO,CAAC,GAAG,CAAC;oBACjB,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;iBACjD,CAAC,CAAA;YACJ,CAAC,CAAC,CAAA;YAEF,EAAE,CAAC,YAAY,EAAE;gBACf,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,yBAAyB,CAAC,CAAC,CAAC,EAAE,yBAAyB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;YAClF,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/index.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/index.d.ts new file mode 100644 index 0000000..3961def --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/index.d.ts @@ -0,0 +1,6 @@ +/// +import { DatabaseError } from './messages'; +import { serialize } from './serializer'; +import { MessageCallback } from './parser'; +export declare function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise; +export { serialize, DatabaseError }; diff --git a/reverse_engineering/node_modules/pg-protocol/dist/index.js b/reverse_engineering/node_modules/pg-protocol/dist/index.js new file mode 100644 index 0000000..7eca3bf --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/index.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.DatabaseError = exports.serialize = exports.parse = void 0; +const messages_1 = require("./messages"); +Object.defineProperty(exports, "DatabaseError", { enumerable: true, get: function () { return messages_1.DatabaseError; } }); +const serializer_1 = require("./serializer"); +Object.defineProperty(exports, "serialize", { enumerable: true, get: function () { return serializer_1.serialize; } }); +const parser_1 = require("./parser"); +function parse(stream, callback) { + const parser = new parser_1.Parser(); + stream.on('data', (buffer) => parser.parse(buffer, callback)); + return new Promise((resolve) => stream.on('end', () => resolve())); +} +exports.parse = parse; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/index.js.map b/reverse_engineering/node_modules/pg-protocol/dist/index.js.map new file mode 100644 index 0000000..5db25b6 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA0D;AAUtC,8FAVK,wBAAa,OAUL;AATjC,6CAAwC;AAS/B,0FATA,sBAAS,OASA;AARlB,qCAAkD;AAElD,SAAgB,KAAK,CAAC,MAA6B,EAAE,QAAyB;IAC5E,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAA;IAC3B,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,MAAc,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;IACrE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AACpE,CAAC;AAJD,sBAIC"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/messages.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/messages.d.ts new file mode 100644 index 0000000..f8f2e63 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/messages.d.ts @@ -0,0 +1,162 @@ +/// +export declare type Mode = 'text' | 'binary'; +export declare type MessageName = 'parseComplete' | 'bindComplete' | 
'closeComplete' | 'noData' | 'portalSuspended' | 'replicationStart' | 'emptyQuery' | 'copyDone' | 'copyData' | 'rowDescription' | 'parameterDescription' | 'parameterStatus' | 'backendKeyData' | 'notification' | 'readyForQuery' | 'commandComplete' | 'dataRow' | 'copyInResponse' | 'copyOutResponse' | 'authenticationOk' | 'authenticationMD5Password' | 'authenticationCleartextPassword' | 'authenticationSASL' | 'authenticationSASLContinue' | 'authenticationSASLFinal' | 'error' | 'notice'; +export interface BackendMessage { + name: MessageName; + length: number; +} +export declare const parseComplete: BackendMessage; +export declare const bindComplete: BackendMessage; +export declare const closeComplete: BackendMessage; +export declare const noData: BackendMessage; +export declare const portalSuspended: BackendMessage; +export declare const replicationStart: BackendMessage; +export declare const emptyQuery: BackendMessage; +export declare const copyDone: BackendMessage; +interface NoticeOrError { + message: string | undefined; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; +} +export declare class DatabaseError extends Error implements NoticeOrError { + readonly length: number; + readonly name: MessageName; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: 
string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; + constructor(message: string, length: number, name: MessageName); +} +export declare class CopyDataMessage { + readonly length: number; + readonly chunk: Buffer; + readonly name = "copyData"; + constructor(length: number, chunk: Buffer); +} +export declare class CopyResponse { + readonly length: number; + readonly name: MessageName; + readonly binary: boolean; + readonly columnTypes: number[]; + constructor(length: number, name: MessageName, binary: boolean, columnCount: number); +} +export declare class Field { + readonly name: string; + readonly tableID: number; + readonly columnID: number; + readonly dataTypeID: number; + readonly dataTypeSize: number; + readonly dataTypeModifier: number; + readonly format: Mode; + constructor(name: string, tableID: number, columnID: number, dataTypeID: number, dataTypeSize: number, dataTypeModifier: number, format: Mode); +} +export declare class RowDescriptionMessage { + readonly length: number; + readonly fieldCount: number; + readonly name: MessageName; + readonly fields: Field[]; + constructor(length: number, fieldCount: number); +} +export declare class ParameterDescriptionMessage { + readonly length: number; + readonly parameterCount: number; + readonly name: MessageName; + readonly dataTypeIDs: number[]; + constructor(length: number, parameterCount: number); +} +export declare class ParameterStatusMessage { + readonly length: number; + readonly parameterName: string; + readonly parameterValue: string; + readonly name: MessageName; + constructor(length: number, parameterName: string, parameterValue: string); +} +export declare class AuthenticationMD5Password implements BackendMessage { + readonly length: number; + readonly salt: Buffer; + readonly name: MessageName; + constructor(length: number, salt: Buffer); +} +export declare class 
BackendKeyDataMessage { + readonly length: number; + readonly processID: number; + readonly secretKey: number; + readonly name: MessageName; + constructor(length: number, processID: number, secretKey: number); +} +export declare class NotificationResponseMessage { + readonly length: number; + readonly processId: number; + readonly channel: string; + readonly payload: string; + readonly name: MessageName; + constructor(length: number, processId: number, channel: string, payload: string); +} +export declare class ReadyForQueryMessage { + readonly length: number; + readonly status: string; + readonly name: MessageName; + constructor(length: number, status: string); +} +export declare class CommandCompleteMessage { + readonly length: number; + readonly text: string; + readonly name: MessageName; + constructor(length: number, text: string); +} +export declare class DataRowMessage { + length: number; + fields: any[]; + readonly fieldCount: number; + readonly name: MessageName; + constructor(length: number, fields: any[]); +} +export declare class NoticeMessage implements BackendMessage, NoticeOrError { + readonly length: number; + readonly message: string | undefined; + constructor(length: number, message: string | undefined); + readonly name = "notice"; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; +} +export {}; diff --git a/reverse_engineering/node_modules/pg-protocol/dist/messages.js b/reverse_engineering/node_modules/pg-protocol/dist/messages.js new file mode 100644 index 0000000..b9f2c44 --- /dev/null +++ 
b/reverse_engineering/node_modules/pg-protocol/dist/messages.js @@ -0,0 +1,160 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoticeMessage = exports.DataRowMessage = exports.CommandCompleteMessage = exports.ReadyForQueryMessage = exports.NotificationResponseMessage = exports.BackendKeyDataMessage = exports.AuthenticationMD5Password = exports.ParameterStatusMessage = exports.ParameterDescriptionMessage = exports.RowDescriptionMessage = exports.Field = exports.CopyResponse = exports.CopyDataMessage = exports.DatabaseError = exports.copyDone = exports.emptyQuery = exports.replicationStart = exports.portalSuspended = exports.noData = exports.closeComplete = exports.bindComplete = exports.parseComplete = void 0; +exports.parseComplete = { + name: 'parseComplete', + length: 5, +}; +exports.bindComplete = { + name: 'bindComplete', + length: 5, +}; +exports.closeComplete = { + name: 'closeComplete', + length: 5, +}; +exports.noData = { + name: 'noData', + length: 5, +}; +exports.portalSuspended = { + name: 'portalSuspended', + length: 5, +}; +exports.replicationStart = { + name: 'replicationStart', + length: 4, +}; +exports.emptyQuery = { + name: 'emptyQuery', + length: 4, +}; +exports.copyDone = { + name: 'copyDone', + length: 4, +}; +class DatabaseError extends Error { + constructor(message, length, name) { + super(message); + this.length = length; + this.name = name; + } +} +exports.DatabaseError = DatabaseError; +class CopyDataMessage { + constructor(length, chunk) { + this.length = length; + this.chunk = chunk; + this.name = 'copyData'; + } +} +exports.CopyDataMessage = CopyDataMessage; +class CopyResponse { + constructor(length, name, binary, columnCount) { + this.length = length; + this.name = name; + this.binary = binary; + this.columnTypes = new Array(columnCount); + } +} +exports.CopyResponse = CopyResponse; +class Field { + constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) { + 
this.name = name; + this.tableID = tableID; + this.columnID = columnID; + this.dataTypeID = dataTypeID; + this.dataTypeSize = dataTypeSize; + this.dataTypeModifier = dataTypeModifier; + this.format = format; + } +} +exports.Field = Field; +class RowDescriptionMessage { + constructor(length, fieldCount) { + this.length = length; + this.fieldCount = fieldCount; + this.name = 'rowDescription'; + this.fields = new Array(this.fieldCount); + } +} +exports.RowDescriptionMessage = RowDescriptionMessage; +class ParameterDescriptionMessage { + constructor(length, parameterCount) { + this.length = length; + this.parameterCount = parameterCount; + this.name = 'parameterDescription'; + this.dataTypeIDs = new Array(this.parameterCount); + } +} +exports.ParameterDescriptionMessage = ParameterDescriptionMessage; +class ParameterStatusMessage { + constructor(length, parameterName, parameterValue) { + this.length = length; + this.parameterName = parameterName; + this.parameterValue = parameterValue; + this.name = 'parameterStatus'; + } +} +exports.ParameterStatusMessage = ParameterStatusMessage; +class AuthenticationMD5Password { + constructor(length, salt) { + this.length = length; + this.salt = salt; + this.name = 'authenticationMD5Password'; + } +} +exports.AuthenticationMD5Password = AuthenticationMD5Password; +class BackendKeyDataMessage { + constructor(length, processID, secretKey) { + this.length = length; + this.processID = processID; + this.secretKey = secretKey; + this.name = 'backendKeyData'; + } +} +exports.BackendKeyDataMessage = BackendKeyDataMessage; +class NotificationResponseMessage { + constructor(length, processId, channel, payload) { + this.length = length; + this.processId = processId; + this.channel = channel; + this.payload = payload; + this.name = 'notification'; + } +} +exports.NotificationResponseMessage = NotificationResponseMessage; +class ReadyForQueryMessage { + constructor(length, status) { + this.length = length; + this.status = status; + this.name = 
'readyForQuery'; + } +} +exports.ReadyForQueryMessage = ReadyForQueryMessage; +class CommandCompleteMessage { + constructor(length, text) { + this.length = length; + this.text = text; + this.name = 'commandComplete'; + } +} +exports.CommandCompleteMessage = CommandCompleteMessage; +class DataRowMessage { + constructor(length, fields) { + this.length = length; + this.fields = fields; + this.name = 'dataRow'; + this.fieldCount = fields.length; + } +} +exports.DataRowMessage = DataRowMessage; +class NoticeMessage { + constructor(length, message) { + this.length = length; + this.message = message; + this.name = 'notice'; + } +} +exports.NoticeMessage = NoticeMessage; +//# sourceMappingURL=messages.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/messages.js.map b/reverse_engineering/node_modules/pg-protocol/dist/messages.js.map new file mode 100644 index 0000000..091e5c3 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/messages.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"messages.js","sourceRoot":"","sources":["../src/messages.ts"],"names":[],"mappings":";;;AAoCa,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,YAAY,GAAmB;IAC1C,IAAI,EAAE,cAAc;IACpB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,MAAM,GAAmB;IACpC,IAAI,EAAE,QAAQ;IACd,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,eAAe,GAAmB;IAC7C,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,gBAAgB,GAAmB;IAC9C,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,UAAU,GAAmB;IACxC,IAAI,EAAE,YAAY;IAClB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,QAAQ,GAAmB;IACtC,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,CAAC;CACV,CAAA;AAsBD,MAAa,aAAc,SAAQ,KAAK;IAiBtC,YAAY,OAAe,EAAkB,MAAc,EAAkB,IAAiB;QAC5F,KAAK,CAAC,OAAO,CAAC,CAAA;QAD6B,WAAM,GAAN,MAAM,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAa;IAE9F,CAAC;CACF;AApBD,sCAoBC;AAED,MAAa,eAAe;IAE1B,YAA4B,MAAc,EAAkB,KAAa;QAA7C,WAAM,GAAN,MAAM,CAAQ;QAAkB,UAAK,GAAL,KAAK,CAAQ;QADzD,SAAI,GAAG,UAAU,CAAA;IAC2C,CAAC;CAC9E;AAHD,0CAGC;AAED,MAAa,YAAY;IAEvB,YACkB,MAAc,EACd,IAAiB,EACjB,MAAe,EAC/B,WAAmB;QAHH,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;QACjB,WAAM,GAAN,MAAM,CAAS;QAG/B,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAA;IAC3C,CAAC;CACF;AAVD,oCAUC;AAED,MAAa,KAAK;IAChB,YACkB,IAAY,EACZ,OAAe,EACf,QAAgB,EAChB,UAAkB,EAClB,YAAoB,EACpB,gBAAwB,EACxB,MAAY;QANZ,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAQ;QACf,aAAQ,GAAR,QAAQ,CAAQ;QAChB,eAAU,GAAV,UAAU,CAAQ;QAClB,iBAAY,GAAZ,YAAY,CAAQ;QACpB,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAM;IAC3B,CAAC;CACL;AAVD,sBAUC;AAED,MAAa,qBAAqB;IAGhC,YAA4B,MAAc,EAAkB,UAAkB;QAAlD,WAAM,GAAN,MAAM,CAAQ;QAAkB,eAAU,GAAV,UAAU,CAAQ;QAF9D,SAAI,GAAgB,gBAAgB,CAAA;QAGlD,IAAI,CAAC,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAA;IAC1C,CAAC;CACF;AAND,sDAMC;AAED,MAAa,2BAA2B;IAGtC,YAA4B,MAAc,EAAkB,cAAsB;QAAtD,WAAM,GAAN,MAAM,CAAQ;QAAkB,mBAAc,GAAd,cAAc,CAAQ;QAFlE,SAAI,GAAgB,sBAAsB,CAAA;QAGxD,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;IACnD,CAAC;CACF;AAND,
kEAMC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,aAAqB,EACrB,cAAsB;QAFtB,WAAM,GAAN,MAAM,CAAQ;QACd,kBAAa,GAAb,aAAa,CAAQ;QACrB,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,iBAAiB,CAAA;IAKlD,CAAC;CACL;AAPD,wDAOC;AAED,MAAa,yBAAyB;IAEpC,YAA4B,MAAc,EAAkB,IAAY;QAA5C,WAAM,GAAN,MAAM,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAQ;QADxD,SAAI,GAAgB,2BAA2B,CAAA;IACY,CAAC;CAC7E;AAHD,8DAGC;AAED,MAAa,qBAAqB;IAEhC,YAA4B,MAAc,EAAkB,SAAiB,EAAkB,SAAiB;QAApF,WAAM,GAAN,MAAM,CAAQ;QAAkB,cAAS,GAAT,SAAS,CAAQ;QAAkB,cAAS,GAAT,SAAS,CAAQ;QADhG,SAAI,GAAgB,gBAAgB,CAAA;IAC+D,CAAC;CACrH;AAHD,sDAGC;AAED,MAAa,2BAA2B;IAEtC,YACkB,MAAc,EACd,SAAiB,EACjB,OAAe,EACf,OAAe;QAHf,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QALjB,SAAI,GAAgB,cAAc,CAAA;IAM/C,CAAC;CACL;AARD,kEAQC;AAED,MAAa,oBAAoB;IAE/B,YAA4B,MAAc,EAAkB,MAAc;QAA9C,WAAM,GAAN,MAAM,CAAQ;QAAkB,WAAM,GAAN,MAAM,CAAQ;QAD1D,SAAI,GAAgB,eAAe,CAAA;IAC0B,CAAC;CAC/E;AAHD,oDAGC;AAED,MAAa,sBAAsB;IAEjC,YAA4B,MAAc,EAAkB,IAAY;QAA5C,WAAM,GAAN,MAAM,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAQ;QADxD,SAAI,GAAgB,iBAAiB,CAAA;IACsB,CAAC;CAC7E;AAHD,wDAGC;AAED,MAAa,cAAc;IAGzB,YAAmB,MAAc,EAAS,MAAa;QAApC,WAAM,GAAN,MAAM,CAAQ;QAAS,WAAM,GAAN,MAAM,CAAO;QADvC,SAAI,GAAgB,SAAS,CAAA;QAE3C,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,MAAM,CAAA;IACjC,CAAC;CACF;AAND,wCAMC;AAED,MAAa,aAAa;IACxB,YAA4B,MAAc,EAAkB,OAA2B;QAA3D,WAAM,GAAN,MAAM,CAAQ;QAAkB,YAAO,GAAP,OAAO,CAAoB;QACvE,SAAI,GAAG,QAAQ,CAAA;IAD2D,CAAC;CAkB5F;AAnBD,sCAmBC"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js b/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js new file mode 100644 index 
0000000..18d1eab --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js @@ -0,0 +1,248 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = __importDefault(require("assert")); +const serializer_1 = require("./serializer"); +const buffer_list_1 = __importDefault(require("./testing/buffer-list")); +describe('serializer', () => { + it('builds startup message', function () { + const actual = serializer_1.serialize.startup({ + user: 'brian', + database: 'bang', + }); + assert_1.default.deepEqual(actual, new buffer_list_1.default() + .addInt16(3) + .addInt16(0) + .addCString('user') + .addCString('brian') + .addCString('database') + .addCString('bang') + .addCString('client_encoding') + .addCString('UTF8') + .addCString('') + .join(true)); + }); + it('builds password message', function () { + const actual = serializer_1.serialize.password('!'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('!').join(true, 'p')); + }); + it('builds request ssl message', function () { + const actual = serializer_1.serialize.requestSsl(); + const expected = new buffer_list_1.default().addInt32(80877103).join(true); + assert_1.default.deepEqual(actual, expected); + }); + it('builds SASLInitialResponseMessage message', function () { + const actual = serializer_1.serialize.sendSASLInitialResponseMessage('mech', 'data'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('mech').addInt32(4).addString('data').join(true, 'p')); + }); + it('builds SCRAMClientFinalMessage message', function () { + const actual = serializer_1.serialize.sendSCRAMClientFinalMessage('data'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addString('data').join(true, 'p')); + }); + it('builds query message', function () { + 
var txt = 'select * from boom'; + const actual = serializer_1.serialize.query(txt); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString(txt).join(true, 'Q')); + }); + describe('parse message', () => { + it('builds parse message', function () { + const actual = serializer_1.serialize.parse({ text: '!' }); + var expected = new buffer_list_1.default().addCString('').addCString('!').addInt16(0).join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds parse message with named query', function () { + const actual = serializer_1.serialize.parse({ + name: 'boom', + text: 'select * from boom', + types: [], + }); + var expected = new buffer_list_1.default().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + it('with multiple parameters', function () { + const actual = serializer_1.serialize.parse({ + name: 'force', + text: 'select * from bang where name = $1', + types: [1, 2, 3, 4], + }); + var expected = new buffer_list_1.default() + .addCString('force') + .addCString('select * from bang where name = $1') + .addInt16(4) + .addInt32(1) + .addInt32(2) + .addInt32(3) + .addInt32(4) + .join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('bind messages', function () { + it('with no values', function () { + const actual = serializer_1.serialize.bind(); + var expectedBuffer = new buffer_list_1.default() + .addCString('') + .addCString('') + .addInt16(0) + .addInt16(0) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('with named statement, portal, and values', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + 
.addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing')) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + }); + it('with custom valueMapper', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + valueMapper: () => null, + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('with named statement, portal, and buffer value', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, Buffer.from('zing', 'utf8')], + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) // value count + .addInt16(0) // string + .addInt16(0) // string + .addInt16(0) // string + .addInt16(1) // binary + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing', 'utf-8')) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + describe('builds execute message', function () { + it('for unamed portal with no row limit', function () { + const actual = serializer_1.serialize.execute(); + var expectedBuffer = new buffer_list_1.default().addCString('').addInt32(0).join(true, 'E'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('for named portal with row limit', function () { + const actual = 
serializer_1.serialize.execute({ + portal: 'my favorite portal', + rows: 100, + }); + var expectedBuffer = new buffer_list_1.default().addCString('my favorite portal').addInt32(100).join(true, 'E'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + }); + it('builds flush command', function () { + const actual = serializer_1.serialize.flush(); + var expected = new buffer_list_1.default().join(true, 'H'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds sync command', function () { + const actual = serializer_1.serialize.sync(); + var expected = new buffer_list_1.default().join(true, 'S'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds end command', function () { + const actual = serializer_1.serialize.end(); + var expected = Buffer.from([0x58, 0, 0, 0, 4]); + assert_1.default.deepEqual(actual, expected); + }); + describe('builds describe command', function () { + it('describe statement', function () { + const actual = serializer_1.serialize.describe({ type: 'S', name: 'bang' }); + var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'D'); + assert_1.default.deepEqual(actual, expected); + }); + it('describe unnamed portal', function () { + const actual = serializer_1.serialize.describe({ type: 'P' }); + var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'D'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('builds close command', function () { + it('describe statement', function () { + const actual = serializer_1.serialize.close({ type: 'S', name: 'bang' }); + var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'C'); + assert_1.default.deepEqual(actual, expected); + }); + it('describe unnamed portal', function () { + const actual = serializer_1.serialize.close({ type: 'P' }); + var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'C'); + 
assert_1.default.deepEqual(actual, expected); + }); + }); + describe('copy messages', function () { + it('builds copyFromChunk', () => { + const actual = serializer_1.serialize.copyData(Buffer.from([1, 2, 3])); + const expected = new buffer_list_1.default().add(Buffer.from([1, 2, 3])).join(true, 'd'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds copy fail', () => { + const actual = serializer_1.serialize.copyFail('err!'); + const expected = new buffer_list_1.default().addCString('err!').join(true, 'f'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds copy done', () => { + const actual = serializer_1.serialize.copyDone(); + const expected = new buffer_list_1.default().join(true, 'c'); + assert_1.default.deepEqual(actual, expected); + }); + }); + it('builds cancel message', () => { + const actual = serializer_1.serialize.cancel(3, 4); + const expected = new buffer_list_1.default().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true); + assert_1.default.deepEqual(actual, expected); + }); +}); +//# sourceMappingURL=outbound-serializer.test.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js.map b/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js.map new file mode 100644 index 0000000..3dcb1c8 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/outbound-serializer.test.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"outbound-serializer.test.js","sourceRoot":"","sources":["../src/outbound-serializer.test.ts"],"names":[],"mappings":";;;;;AAAA,oDAA2B;AAC3B,6CAAwC;AACxC,wEAA8C;AAE9C,QAAQ,CAAC,YAAY,EAAE,GAAG,EAAE;IAC1B,EAAE,CAAC,wBAAwB,EAAE;QAC3B,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;YAC/B,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,MAAM;SACjB,CAAC,CAAA;QACF,gBAAM,CAAC,SAAS,CACd,MAAM,EACN,IAAI,qBAAU,EAAE;aACb,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,OAAO,CAAC;aACnB,UAAU,CAAC,UAAU,CAAC;aACtB,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,iBAAiB,CAAC;aAC7B,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,EAAE,CAAC;aACd,IAAI,CAAC,IAAI,CAAC,CACd,CAAA;IACH,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,yBAAyB,EAAE;QAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;QACtC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,4BAA4B,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,UAAU,EAAE,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC/D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,2CAA2C,EAAE;QAC9C,MAAM,MAAM,GAAG,sBAAS,CAAC,8BAA8B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QACvE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC7G,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,wCAAwC,EAAE;QAC3C,MAAM,MAAM,GAAG,sBAAS,CAAC,2BAA2B,CAAC,MAAM,CAAC,CAAA;QAC5D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC9E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,IAAI,GAAG,GAAG,oBAAoB,CAAA;QAC9B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACnC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE,GAAG,EAAE;QAC7B,EAAE,CAAC,sBAAsB,EAAE;YACz
B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC1F,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,uCAAuC,EAAE;YAC1C,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,oBAAoB;gBAC1B,KAAK,EAAE,EAAE;aACV,CAAC,CAAA;YACF,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/G,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,OAAO;gBACb,IAAI,EAAE,oCAAoC;gBAC1C,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;aACpB,CAAC,CAAA;YACF,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE;iBAC5B,UAAU,CAAC,OAAO,CAAC;iBACnB,UAAU,CAAC,oCAAoC,CAAC;iBAChD,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,gBAAgB,EAAE;YACnB,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,EAAE,CAAA;YAE/B,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;iBAClC,UAAU,CAAC,EAAE,CAAC;iBACd,UAAU,CAAC,EAAE,CAAC;iBACd,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0CAA0C,EAAE;YAC7C,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;gBAC5B,MAAM,EAAE,MAAM;gBACd,SAAS,EAAE,KAAK;gBAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;aAClC,CAAC,CAAA;YACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;iBAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;iBACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;iBACnC,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CA
AC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;iBACrB,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACtB,QAAQ,CAAC,CAAC,CAAC,CAAC;iBACZ,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;iBACxB,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,yBAAyB,EAAE;QAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;YACjC,WAAW,EAAE,GAAG,EAAE,CAAC,IAAI;SACxB,CAAC,CAAA;QACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;aAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;aACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;aACnC,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC;aACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,gDAAgD,EAAE;QACnD,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;SACvD,CAAC,CAAA;QACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;aAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;aACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;aACnC,QAAQ,CAAC,CAAC,CAAC,CAAC,cAAc;aAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aACtB,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAA
C,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;aACjC,QAAQ,CAAC,CAAC,CAAC;aACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wBAAwB,EAAE;QACjC,EAAE,CAAC,qCAAqC,EAAE;YACxC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,EAAE,CAAA;YAClC,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAChF,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,iCAAiC,EAAE;YACpC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;gBAC/B,MAAM,EAAE,oBAAoB;gBAC5B,IAAI,EAAE,GAAG;aACV,CAAC,CAAA;YACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACpG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,EAAE,CAAA;QAChC,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAC/C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,qBAAqB,EAAE;QACxB,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,EAAE,CAAA;QAC/B,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAC/C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,oBAAoB,EAAE;QACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,GAAG,EAAE,CAAA;QAC9B,IAAI,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;QAC9C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC9D,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,yBAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAChD,IAAI,QAA
Q,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC3E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,sBAAsB,EAAE;QAC/B,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC3D,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,yBAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC3E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,sBAAsB,EAAE,GAAG,EAAE;YAC9B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YACzD,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC7E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;YACzC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACpE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,EAAE,CAAA;YACnC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACjD,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,uBAAuB,EAAE,GAAG,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC
,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAClG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/parser.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/parser.d.ts new file mode 100644 index 0000000..030d1ef --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/parser.d.ts @@ -0,0 +1,38 @@ +/// +import { TransformOptions } from 'stream'; +import { Mode, BackendMessage } from './messages'; +export declare type Packet = { + code: number; + packet: Buffer; +}; +declare type StreamOptions = TransformOptions & { + mode: Mode; +}; +export declare type MessageCallback = (msg: BackendMessage) => void; +export declare class Parser { + private buffer; + private bufferLength; + private bufferOffset; + private reader; + private mode; + constructor(opts?: StreamOptions); + parse(buffer: Buffer, callback: MessageCallback): void; + private mergeBuffer; + private handlePacket; + private parseReadyForQueryMessage; + private parseCommandCompleteMessage; + private parseCopyData; + private parseCopyInMessage; + private parseCopyOutMessage; + private parseCopyMessage; + private parseNotificationMessage; + private parseRowDescriptionMessage; + private parseField; + private parseParameterDescriptionMessage; + private parseDataRowMessage; + private parseParameterStatusMessage; + private parseBackendKeyData; + parseAuthenticationResponse(offset: number, length: number, bytes: Buffer): any; + private parseErrorMessage; +} +export {}; diff --git a/reverse_engineering/node_modules/pg-protocol/dist/parser.js b/reverse_engineering/node_modules/pg-protocol/dist/parser.js new file mode 100644 index 0000000..d03b637 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/parser.js @@ -0,0 +1,308 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + 
return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Parser = void 0; +const messages_1 = require("./messages"); +const buffer_reader_1 = require("./buffer-reader"); +const assert_1 = __importDefault(require("assert")); +// every message is prefixed with a single bye +const CODE_LENGTH = 1; +// every message has an int32 length which includes itself but does +// NOT include the code in the length +const LEN_LENGTH = 4; +const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH; +const emptyBuffer = Buffer.allocUnsafe(0); +class Parser { + constructor(opts) { + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + this.reader = new buffer_reader_1.BufferReader(); + if ((opts === null || opts === void 0 ? void 0 : opts.mode) === 'binary') { + throw new Error('Binary mode not supported yet'); + } + this.mode = (opts === null || opts === void 0 ? void 0 : opts.mode) || 'text'; + } + parse(buffer, callback) { + this.mergeBuffer(buffer); + const bufferFullLength = this.bufferOffset + this.bufferLength; + let offset = this.bufferOffset; + while (offset + HEADER_LENGTH <= bufferFullLength) { + // code is 1 byte long - it identifies the message type + const code = this.buffer[offset]; + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = this.buffer.readUInt32BE(offset + CODE_LENGTH); + const fullMessageLength = CODE_LENGTH + length; + if (fullMessageLength + offset <= bufferFullLength) { + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer); + callback(message); + offset += fullMessageLength; + } + else { + break; + } + } + if (offset === bufferFullLength) { + // No more use for the buffer + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + } + else { + // Adjust the cursors of remainingBuffer + this.bufferLength = bufferFullLength - offset; + this.bufferOffset = offset; + } 
+ } + mergeBuffer(buffer) { + if (this.bufferLength > 0) { + const newLength = this.bufferLength + buffer.byteLength; + const newFullLength = newLength + this.bufferOffset; + if (newFullLength > this.buffer.byteLength) { + // We can't concat the new buffer with the remaining one + let newBuffer; + if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { + // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer + newBuffer = this.buffer; + } + else { + // Allocate a new larger buffer + let newBufferLength = this.buffer.byteLength * 2; + while (newLength >= newBufferLength) { + newBufferLength *= 2; + } + newBuffer = Buffer.allocUnsafe(newBufferLength); + } + // Move the remaining buffer to the new one + this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength); + this.buffer = newBuffer; + this.bufferOffset = 0; + } + // Concat the new buffer with the remaining one + buffer.copy(this.buffer, this.bufferOffset + this.bufferLength); + this.bufferLength = newLength; + } + else { + this.buffer = buffer; + this.bufferOffset = 0; + this.bufferLength = buffer.byteLength; + } + } + handlePacket(offset, code, length, bytes) { + switch (code) { + case 50 /* BindComplete */: + return messages_1.bindComplete; + case 49 /* ParseComplete */: + return messages_1.parseComplete; + case 51 /* CloseComplete */: + return messages_1.closeComplete; + case 110 /* NoData */: + return messages_1.noData; + case 115 /* PortalSuspended */: + return messages_1.portalSuspended; + case 99 /* CopyDone */: + return messages_1.copyDone; + case 87 /* ReplicationStart */: + return messages_1.replicationStart; + case 73 /* EmptyQuery */: + return messages_1.emptyQuery; + case 68 /* DataRow */: + return this.parseDataRowMessage(offset, length, bytes); + case 67 /* CommandComplete */: + return this.parseCommandCompleteMessage(offset, length, bytes); + case 90 /* ReadyForQuery */: + return 
this.parseReadyForQueryMessage(offset, length, bytes); + case 65 /* NotificationResponse */: + return this.parseNotificationMessage(offset, length, bytes); + case 82 /* AuthenticationResponse */: + return this.parseAuthenticationResponse(offset, length, bytes); + case 83 /* ParameterStatus */: + return this.parseParameterStatusMessage(offset, length, bytes); + case 75 /* BackendKeyData */: + return this.parseBackendKeyData(offset, length, bytes); + case 69 /* ErrorMessage */: + return this.parseErrorMessage(offset, length, bytes, 'error'); + case 78 /* NoticeMessage */: + return this.parseErrorMessage(offset, length, bytes, 'notice'); + case 84 /* RowDescriptionMessage */: + return this.parseRowDescriptionMessage(offset, length, bytes); + case 116 /* ParameterDescriptionMessage */: + return this.parseParameterDescriptionMessage(offset, length, bytes); + case 71 /* CopyIn */: + return this.parseCopyInMessage(offset, length, bytes); + case 72 /* CopyOut */: + return this.parseCopyOutMessage(offset, length, bytes); + case 100 /* CopyData */: + return this.parseCopyData(offset, length, bytes); + default: + assert_1.default.fail(`unknown message code: ${code.toString(16)}`); + } + } + parseReadyForQueryMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const status = this.reader.string(1); + return new messages_1.ReadyForQueryMessage(length, status); + } + parseCommandCompleteMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const text = this.reader.cstring(); + return new messages_1.CommandCompleteMessage(length, text); + } + parseCopyData(offset, length, bytes) { + const chunk = bytes.slice(offset, offset + (length - 4)); + return new messages_1.CopyDataMessage(length, chunk); + } + parseCopyInMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, 'copyInResponse'); + } + parseCopyOutMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse'); 
+ } + parseCopyMessage(offset, length, bytes, messageName) { + this.reader.setBuffer(offset, bytes); + const isBinary = this.reader.byte() !== 0; + const columnCount = this.reader.int16(); + const message = new messages_1.CopyResponse(length, messageName, isBinary, columnCount); + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16(); + } + return message; + } + parseNotificationMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processId = this.reader.int32(); + const channel = this.reader.cstring(); + const payload = this.reader.cstring(); + return new messages_1.NotificationResponseMessage(length, processId, channel, payload); + } + parseRowDescriptionMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const message = new messages_1.RowDescriptionMessage(length, fieldCount); + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField(); + } + return message; + } + parseField() { + const name = this.reader.cstring(); + const tableID = this.reader.int32(); + const columnID = this.reader.int16(); + const dataTypeID = this.reader.int32(); + const dataTypeSize = this.reader.int16(); + const dataTypeModifier = this.reader.int32(); + const mode = this.reader.int16() === 0 ? 
'text' : 'binary'; + return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode); + } + parseParameterDescriptionMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const parameterCount = this.reader.int16(); + const message = new messages_1.ParameterDescriptionMessage(length, parameterCount); + for (let i = 0; i < parameterCount; i++) { + message.dataTypeIDs[i] = this.reader.int32(); + } + return message; + } + parseDataRowMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const fields = new Array(fieldCount); + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32(); + // a -1 for length means the value of the field is null + fields[i] = len === -1 ? null : this.reader.string(len); + } + return new messages_1.DataRowMessage(length, fields); + } + parseParameterStatusMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const name = this.reader.cstring(); + const value = this.reader.cstring(); + return new messages_1.ParameterStatusMessage(length, name, value); + } + parseBackendKeyData(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processID = this.reader.int32(); + const secretKey = this.reader.int32(); + return new messages_1.BackendKeyDataMessage(length, processID, secretKey); + } + parseAuthenticationResponse(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const code = this.reader.int32(); + // TODO(bmc): maybe better types here + const message = { + name: 'authenticationOk', + length, + }; + switch (code) { + case 0: // AuthenticationOk + break; + case 3: // AuthenticationCleartextPassword + if (message.length === 8) { + message.name = 'authenticationCleartextPassword'; + } + break; + case 5: // AuthenticationMD5Password + if (message.length === 12) { + message.name = 'authenticationMD5Password'; + const salt = this.reader.bytes(4); + return new 
messages_1.AuthenticationMD5Password(length, salt); + } + break; + case 10: // AuthenticationSASL + message.name = 'authenticationSASL'; + message.mechanisms = []; + let mechanism; + do { + mechanism = this.reader.cstring(); + if (mechanism) { + message.mechanisms.push(mechanism); + } + } while (mechanism); + break; + case 11: // AuthenticationSASLContinue + message.name = 'authenticationSASLContinue'; + message.data = this.reader.string(length - 8); + break; + case 12: // AuthenticationSASLFinal + message.name = 'authenticationSASLFinal'; + message.data = this.reader.string(length - 8); + break; + default: + throw new Error('Unknown authenticationOk message type ' + code); + } + return message; + } + parseErrorMessage(offset, length, bytes, name) { + this.reader.setBuffer(offset, bytes); + const fields = {}; + let fieldType = this.reader.string(1); + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring(); + fieldType = this.reader.string(1); + } + const messageValue = fields.M; + const message = name === 'notice' ? 
new messages_1.NoticeMessage(length, messageValue) : new messages_1.DatabaseError(messageValue, length, name); + message.severity = fields.S; + message.code = fields.C; + message.detail = fields.D; + message.hint = fields.H; + message.position = fields.P; + message.internalPosition = fields.p; + message.internalQuery = fields.q; + message.where = fields.W; + message.schema = fields.s; + message.table = fields.t; + message.column = fields.c; + message.dataType = fields.d; + message.constraint = fields.n; + message.file = fields.F; + message.line = fields.L; + message.routine = fields.R; + return message; + } +} +exports.Parser = Parser; +//# sourceMappingURL=parser.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/parser.js.map b/reverse_engineering/node_modules/pg-protocol/dist/parser.js.map new file mode 100644 index 0000000..3eb27ad --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/parser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.js","sourceRoot":"","sources":["../src/parser.ts"],"names":[],"mappings":";;;;;;AACA,yCA0BmB;AACnB,mDAA8C;AAC9C,oDAA2B;AAE3B,8CAA8C;AAC9C,MAAM,WAAW,GAAG,CAAC,CAAA;AACrB,mEAAmE;AACnE,qCAAqC;AACrC,MAAM,UAAU,GAAG,CAAC,CAAA;AAEpB,MAAM,aAAa,GAAG,WAAW,GAAG,UAAU,CAAA;AAO9C,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAiCzC,MAAa,MAAM;IAOjB,YAAY,IAAoB;QANxB,WAAM,GAAW,WAAW,CAAA;QAC5B,iBAAY,GAAW,CAAC,CAAA;QACxB,iBAAY,GAAW,CAAC,CAAA;QACxB,WAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;QAIjC,IAAI,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,MAAK,QAAQ,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAA;SACjD;QACD,IAAI,CAAC,IAAI,GAAG,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,KAAI,MAAM,CAAA;IAClC,CAAC;IAEM,KAAK,CAAC,MAAc,EAAE,QAAyB;QACpD,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QACxB,MAAM,gBAAgB,GAAG,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9D,IAAI,MAAM,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9B,OAAO,MAAM,GAAG,aAAa,IAAI,gBAAgB,EAAE;YACjD,uDAAuD;YACvD,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;YAChC,4EAA4E;Y
AC5E,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,GAAG,WAAW,CAAC,CAAA;YAC7D,MAAM,iBAAiB,GAAG,WAAW,GAAG,MAAM,CAAA;YAC9C,IAAI,iBAAiB,GAAG,MAAM,IAAI,gBAAgB,EAAE;gBAClD,MAAM,OAAO,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,GAAG,aAAa,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;gBACpF,QAAQ,CAAC,OAAO,CAAC,CAAA;gBACjB,MAAM,IAAI,iBAAiB,CAAA;aAC5B;iBAAM;gBACL,MAAK;aACN;SACF;QACD,IAAI,MAAM,KAAK,gBAAgB,EAAE;YAC/B,6BAA6B;YAC7B,IAAI,CAAC,MAAM,GAAG,WAAW,CAAA;YACzB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;SACtB;aAAM;YACL,wCAAwC;YACxC,IAAI,CAAC,YAAY,GAAG,gBAAgB,GAAG,MAAM,CAAA;YAC7C,IAAI,CAAC,YAAY,GAAG,MAAM,CAAA;SAC3B;IACH,CAAC;IAEO,WAAW,CAAC,MAAc;QAChC,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YACzB,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;YACvD,MAAM,aAAa,GAAG,SAAS,GAAG,IAAI,CAAC,YAAY,CAAA;YACnD,IAAI,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE;gBAC1C,wDAAwD;gBACxD,IAAI,SAAiB,CAAA;gBACrB,IAAI,SAAS,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,EAAE;oBACjF,kGAAkG;oBAClG,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;iBACxB;qBAAM;oBACL,+BAA+B;oBAC/B,IAAI,eAAe,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,CAAC,CAAA;oBAChD,OAAO,SAAS,IAAI,eAAe,EAAE;wBACnC,eAAe,IAAI,CAAC,CAAA;qBACrB;oBACD,SAAS,GAAG,MAAM,CAAC,WAAW,CAAC,eAAe,CAAC,CAAA;iBAChD;gBACD,2CAA2C;gBAC3C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;gBACxF,IAAI,CAAC,MAAM,GAAG,SAAS,CAAA;gBACvB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;aACtB;YACD,+CAA+C;YAC/C,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;YAC/D,IAAI,CAAC,YAAY,GAAG,SAAS,CAAA;SAC9B;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;YACpB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;SACtC;IACH,CAAC;IAEO,YAAY,CAAC,MAAc,EAAE,IAAY,EAAE,MAAc,EAAE,KAAa;QAC9E,QAAQ,IAAI,EAAE;YACZ;gBACE,OAAO,uBAAY,CAAA;YACrB;gBACE,OAAO,wBAAa,CAAA;YACtB;gBACE,OAAO,wBAAa,CAAA;YACtB;gBACE,OAAO,iBAAM,CAAA;YACf;gBACE,OAAO,0BAAe,CAAA;YACxB;gBACE,OAAO,mBAAQ,CAAA;YACjB;gBA
CE,OAAO,2BAAgB,CAAA;YACzB;gBACE,OAAO,qBAAU,CAAA;YACnB;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC9D;gBACE,OAAO,IAAI,CAAC,wBAAwB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC7D;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAA;YAC/D;gBACE,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,0BAA0B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC/D;gBACE,OAAO,IAAI,CAAC,gCAAgC,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACrE;gBACE,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACvD;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAClD;gBACE,gBAAM,CAAC,IAAI,CAAC,yBAAyB,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,CAAA;SAC5D;IACH,CAAC;IAEO,yBAAyB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC7E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACpC,OAAO,IAAI,+BAAoB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;IAEO,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC/E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,OAAO,IAAI,iCAAsB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IACjD,CAAC;IAEO,aAAa,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACjE,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,0BAAe,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC3C,CAAC;IAEO,kBAAkB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACtE,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,gBAAgB,CAAC,CAAA;IACvE,CAAC;IAEO,mBAA
mB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,iBAAiB,CAAC,CAAA;IACxE,CAAC;IAEO,gBAAgB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa,EAAE,WAAwB;QAC9F,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QACzC,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACvC,MAAM,OAAO,GAAG,IAAI,uBAAY,CAAC,MAAM,EAAE,WAAW,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAA;QAC5E,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,EAAE,CAAC,EAAE,EAAE;YACpC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;SAC7C;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,wBAAwB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC5E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACrC,OAAO,IAAI,sCAA2B,CAAC,MAAM,EAAE,SAAS,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC7E,CAAC;IAEO,0BAA0B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC9E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,OAAO,GAAG,IAAI,gCAAqB,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;QAC7D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,UAAU,EAAE,CAAA;SACtC;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,UAAU;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACnC,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,YAAY,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACxC,MAAM,gBAAgB,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAC5C,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC1D,OAAO,IAAI,gBAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,YAAY,EAAE,gBAAgB,EAAE,IAAI,CAAC,CAAA;IAC7F,CAAC;IAEO,gCAAgC,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACpF,IAAI,C
AAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,cAAc,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAC1C,MAAM,OAAO,GAAG,IAAI,sCAA2B,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;SAC7C;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,MAAM,GAAU,IAAI,KAAK,CAAC,UAAU,CAAC,CAAA;QAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,MAAM,GAAG,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;YAC/B,uDAAuD;YACvD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;SACxD;QACD,OAAO,IAAI,yBAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC3C,CAAC;IAEO,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC/E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACnC,OAAO,IAAI,iCAAsB,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;IACxD,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,OAAO,IAAI,gCAAqB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAA;IAChE,CAAC;IAEM,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC9E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAChC,qCAAqC;QACrC,MAAM,OAAO,GAAyB;YACpC,IAAI,EAAE,kBAAkB;YACxB,MAAM;SACP,CAAA;QAED,QAAQ,IAAI,EAAE;YACZ,KAAK,CAAC,EAAE,mBAAmB;gBACzB,MAAK;YACP,KAAK,CAAC,EAAE,kCAAkC;gBACxC,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;oBACxB,OAAO,CAAC,IAAI,GAAG,iCAAiC,CAAA;iBACjD;gBACD,MAAK;YACP,KAAK,CAAC,EAAE,4BAA4B;gBAClC,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;oBACzB,OAAO,CAAC,IAAI,GAAG,2B
AA2B,CAAA;oBAC1C,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;oBACjC,OAAO,IAAI,oCAAyB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;iBACnD;gBACD,MAAK;YACP,KAAK,EAAE,EAAE,qBAAqB;gBAC5B,OAAO,CAAC,IAAI,GAAG,oBAAoB,CAAA;gBACnC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAA;gBACvB,IAAI,SAAiB,CAAA;gBACrB,GAAG;oBACD,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAEjC,IAAI,SAAS,EAAE;wBACb,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;qBACnC;iBACF,QAAQ,SAAS,EAAC;gBACnB,MAAK;YACP,KAAK,EAAE,EAAE,6BAA6B;gBACpC,OAAO,CAAC,IAAI,GAAG,4BAA4B,CAAA;gBAC3C,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;gBAC7C,MAAK;YACP,KAAK,EAAE,EAAE,0BAA0B;gBACjC,OAAO,CAAC,IAAI,GAAG,yBAAyB,CAAA;gBACxC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;gBAC7C,MAAK;YACP;gBACE,MAAM,IAAI,KAAK,CAAC,wCAAwC,GAAG,IAAI,CAAC,CAAA;SACnE;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,iBAAiB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa,EAAE,IAAiB;QACxF,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,MAAM,GAA2B,EAAE,CAAA;QACzC,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACrC,OAAO,SAAS,KAAK,IAAI,EAAE;YACzB,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;YACzC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAClC;QAED,MAAM,YAAY,GAAG,MAAM,CAAC,CAAC,CAAA;QAE7B,MAAM,OAAO,GACX,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,wBAAa,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC,CAAC,CAAC,IAAI,wBAAa,CAAC,YAAY,EAAE,MAAM,EAAE,IAAI,CAAC,CAAA;QAE7G,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,gBAAgB,GAAG,MAAM,CAAC,CAAC,CAAA;QACnC,OAAO,CAAC,aAAa,GAAG,MAAM,CAAC,CAAC,CAAA;QAChC,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;QACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;QACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,
CAAC,UAAU,GAAG,MAAM,CAAC,CAAC,CAAA;QAC7B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,CAAC,CAAA;QAC1B,OAAO,OAAO,CAAA;IAChB,CAAC;CACF;AAvTD,wBAuTC"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/serializer.d.ts b/reverse_engineering/node_modules/pg-protocol/dist/serializer.d.ts new file mode 100644 index 0000000..e0f0a00 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/serializer.d.ts @@ -0,0 +1,43 @@ +/// +declare type ParseOpts = { + name?: string; + types?: number[]; + text: string; +}; +declare type ValueMapper = (param: any, index: number) => any; +declare type BindOpts = { + portal?: string; + binary?: boolean; + statement?: string; + values?: any[]; + valueMapper?: ValueMapper; +}; +declare type ExecOpts = { + portal?: string; + rows?: number; +}; +declare type PortalOpts = { + type: 'S' | 'P'; + name?: string; +}; +declare const serialize: { + startup: (opts: Record) => Buffer; + password: (password: string) => Buffer; + requestSsl: () => Buffer; + sendSASLInitialResponseMessage: (mechanism: string, initialResponse: string) => Buffer; + sendSCRAMClientFinalMessage: (additionalData: string) => Buffer; + query: (text: string) => Buffer; + parse: (query: ParseOpts) => Buffer; + bind: (config?: BindOpts) => Buffer; + execute: (config?: ExecOpts | undefined) => Buffer; + describe: (msg: PortalOpts) => Buffer; + close: (msg: PortalOpts) => Buffer; + flush: () => Buffer; + sync: () => Buffer; + end: () => Buffer; + copyData: (chunk: Buffer) => Buffer; + copyDone: () => Buffer; + copyFail: (message: string) => Buffer; + cancel: (processID: number, secretKey: number) => Buffer; +}; +export { serialize }; diff --git a/reverse_engineering/node_modules/pg-protocol/dist/serializer.js b/reverse_engineering/node_modules/pg-protocol/dist/serializer.js new file mode 100644 index 0000000..9aa0aed --- /dev/null +++ 
b/reverse_engineering/node_modules/pg-protocol/dist/serializer.js @@ -0,0 +1,189 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.serialize = void 0; +const buffer_writer_1 = require("./buffer-writer"); +const writer = new buffer_writer_1.Writer(); +const startup = (opts) => { + // protocol version + writer.addInt16(3).addInt16(0); + for (const key of Object.keys(opts)) { + writer.addCString(key).addCString(opts[key]); + } + writer.addCString('client_encoding').addCString('UTF8'); + var bodyBuffer = writer.addCString('').flush(); + // this message is sent without a code + var length = bodyBuffer.length + 4; + return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush(); +}; +const requestSsl = () => { + const response = Buffer.allocUnsafe(8); + response.writeInt32BE(8, 0); + response.writeInt32BE(80877103, 4); + return response; +}; +const password = (password) => { + return writer.addCString(password).flush(112 /* startup */); +}; +const sendSASLInitialResponseMessage = function (mechanism, initialResponse) { + // 0x70 = 'p' + writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse); + return writer.flush(112 /* startup */); +}; +const sendSCRAMClientFinalMessage = function (additionalData) { + return writer.addString(additionalData).flush(112 /* startup */); +}; +const query = (text) => { + return writer.addCString(text).flush(81 /* query */); +}; +const emptyArray = []; +const parse = (query) => { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + // normalize missing query names to allow for null + const name = query.name || ''; + if (name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.'); + console.error('You supplied %s (%s)', name, name.length); + console.error('This can cause conflicts and silent errors executing queries'); + /* eslint-enable no-console */ + } + const types = query.types || emptyArray; + var len = types.length; + var buffer = writer + .addCString(name) // name of query + .addCString(query.text) // actual query text + .addInt16(len); + for (var i = 0; i < len; i++) { + buffer.addInt32(types[i]); + } + return writer.flush(80 /* parse */); +}; +const paramWriter = new buffer_writer_1.Writer(); +const writeValues = function (values, valueMapper) { + for (let i = 0; i < values.length; i++) { + const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i]; + if (mappedVal == null) { + // add the param type (string) to the writer + writer.addInt16(0 /* STRING */); + // write -1 to the param writer to indicate null + paramWriter.addInt32(-1); + } + else if (mappedVal instanceof Buffer) { + // add the param type (binary) to the writer + writer.addInt16(1 /* BINARY */); + // add the buffer to the param writer + paramWriter.addInt32(mappedVal.length); + paramWriter.add(mappedVal); + } + else { + // add the param type (string) to the writer + writer.addInt16(0 /* STRING */); + paramWriter.addInt32(Buffer.byteLength(mappedVal)); + paramWriter.addString(mappedVal); + } + } +}; +const bind = (config = {}) => { + // normalize config + const portal = config.portal || ''; + const statement = config.statement || ''; + const binary = config.binary || false; + const values = config.values || emptyArray; + const len = values.length; + writer.addCString(portal).addCString(statement); + writer.addInt16(len); + writeValues(values, config.valueMapper); + writer.addInt16(len); + writer.add(paramWriter.flush()); + // format code + writer.addInt16(binary ? 
1 /* BINARY */ : 0 /* STRING */); + return writer.flush(66 /* bind */); +}; +const emptyExecute = Buffer.from([69 /* execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]); +const execute = (config) => { + // this is the happy path for most queries + if (!config || (!config.portal && !config.rows)) { + return emptyExecute; + } + const portal = config.portal || ''; + const rows = config.rows || 0; + const portalLength = Buffer.byteLength(portal); + const len = 4 + portalLength + 1 + 4; + // one extra bit for code + const buff = Buffer.allocUnsafe(1 + len); + buff[0] = 69 /* execute */; + buff.writeInt32BE(len, 1); + buff.write(portal, 5, 'utf-8'); + buff[portalLength + 5] = 0; // null terminate portal cString + buff.writeUInt32BE(rows, buff.length - 4); + return buff; +}; +const cancel = (processID, secretKey) => { + const buffer = Buffer.allocUnsafe(16); + buffer.writeInt32BE(16, 0); + buffer.writeInt16BE(1234, 4); + buffer.writeInt16BE(5678, 6); + buffer.writeInt32BE(processID, 8); + buffer.writeInt32BE(secretKey, 12); + return buffer; +}; +const cstringMessage = (code, string) => { + const stringLen = Buffer.byteLength(string); + const len = 4 + stringLen + 1; + // one extra bit for code + const buffer = Buffer.allocUnsafe(1 + len); + buffer[0] = code; + buffer.writeInt32BE(len, 1); + buffer.write(string, 5, 'utf-8'); + buffer[len] = 0; // null terminate cString + return buffer; +}; +const emptyDescribePortal = writer.addCString('P').flush(68 /* describe */); +const emptyDescribeStatement = writer.addCString('S').flush(68 /* describe */); +const describe = (msg) => { + return msg.name + ? cstringMessage(68 /* describe */, `${msg.type}${msg.name || ''}`) + : msg.type === 'P' + ? 
emptyDescribePortal + : emptyDescribeStatement; +}; +const close = (msg) => { + const text = `${msg.type}${msg.name || ''}`; + return cstringMessage(67 /* close */, text); +}; +const copyData = (chunk) => { + return writer.add(chunk).flush(100 /* copyFromChunk */); +}; +const copyFail = (message) => { + return cstringMessage(102 /* copyFail */, message); +}; +const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]); +const flushBuffer = codeOnlyBuffer(72 /* flush */); +const syncBuffer = codeOnlyBuffer(83 /* sync */); +const endBuffer = codeOnlyBuffer(88 /* end */); +const copyDoneBuffer = codeOnlyBuffer(99 /* copyDone */); +const serialize = { + startup, + password, + requestSsl, + sendSASLInitialResponseMessage, + sendSCRAMClientFinalMessage, + query, + parse, + bind, + execute, + describe, + close, + flush: () => flushBuffer, + sync: () => syncBuffer, + end: () => endBuffer, + copyData, + copyDone: () => copyDoneBuffer, + copyFail, + cancel, +}; +exports.serialize = serialize; +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/dist/serializer.js.map b/reverse_engineering/node_modules/pg-protocol/dist/serializer.js.map new file mode 100644 index 0000000..75d7119 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/dist/serializer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../src/serializer.ts"],"names":[],"mappings":";;;AAAA,mDAAwC;AAkBxC,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,OAAO,GAAG,CAAC,IAA4B,EAAU,EAAE;IACvD,mBAAmB;IACnB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;KAC7C;IAED,MAAM,CAAC,UAAU,CAAC,iBAAiB,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAEvD,IAAI,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAC9C,sCAAsC;IAEtC,IAAI,MAAM,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAA;IAElC,OAAO,IAAI,sBAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,EAAE,CAAA;AAC9D,CAAC,CAAA;AAED,MAAM,UAAU,GAAG,GAAW,EAAE;IAC9B,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;IACtC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAC3B,QAAQ,CAAC,YAAY,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAA;IAClC,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,QAAgB,EAAU,EAAE;IAC5C,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,KAAK,mBAAc,CAAA;AACxD,CAAC,CAAA;AAED,MAAM,8BAA8B,GAAG,UAAU,SAAiB,EAAE,eAAuB;IACzF,aAAa;IACb,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,CAAA;IAEpG,OAAO,MAAM,CAAC,KAAK,mBAAc,CAAA;AACnC,CAAC,CAAA;AAED,MAAM,2BAA2B,GAAG,UAAU,cAAsB;IAClE,OAAO,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,mBAAc,CAAA;AAC7D,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAU,EAAE;IACrC,OAAO,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,KAAK,gBAAY,CAAA;AAClD,CAAC,CAAA;AAQD,MAAM,UAAU,GAAU,EAAE,CAAA;AAE5B,MAAM,KAAK,GAAG,CAAC,KAAgB,EAAU,EAAE;IACzC,8BAA8B;IAC9B,uBAAuB;IACvB,gCAAgC;IAChC,8BAA8B;IAE9B,kDAAkD;IAClD,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,IAAI,EAAE,CAAA;IAC7B,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;QACpB,+BAA+B;QAC/B,OAAO,CAAC,KAAK,CAAC,gEAAgE,CAAC,CAAA;QAC/E,OAAO,CAAC,KAAK,CAAC,sBAAsB,EAAE,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACxD,OAAO,CAAC,KAAK,CAAC,8DAA8D,CAAC,CAAA;QAC7E,8BAA8B;KAC/B;IAED,MAAM,KAAK,GAAG,KAAK,CAAC,K
AAK,IAAI,UAAU,CAAA;IAEvC,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,CAAA;IAEtB,IAAI,MAAM,GAAG,MAAM;SAChB,UAAU,CAAC,IAAI,CAAC,CAAC,gBAAgB;SACjC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,oBAAoB;SAC3C,QAAQ,CAAC,GAAG,CAAC,CAAA;IAEhB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;KAC1B;IAED,OAAO,MAAM,CAAC,KAAK,gBAAY,CAAA;AACjC,CAAC,CAAA;AAaD,MAAM,WAAW,GAAG,IAAI,sBAAM,EAAE,CAAA;AAQhC,MAAM,WAAW,GAAG,UAAU,MAAa,EAAE,WAAyB;IACpE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,MAAM,SAAS,GAAG,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACrE,IAAI,SAAS,IAAI,IAAI,EAAE;YACrB,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,gBAAkB,CAAA;YACjC,gDAAgD;YAChD,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;SACzB;aAAM,IAAI,SAAS,YAAY,MAAM,EAAE;YACtC,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,gBAAkB,CAAA;YACjC,qCAAqC;YACrC,WAAW,CAAC,QAAQ,CAAC,SAAS,CAAC,MAAM,CAAC,CAAA;YACtC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAA;SAC3B;aAAM;YACL,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,gBAAkB,CAAA;YACjC,WAAW,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAA;YAClD,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;SACjC;KACF;AACH,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,CAAC,SAAmB,EAAE,EAAU,EAAE;IAC7C,mBAAmB;IACnB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,EAAE,CAAA;IACxC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,KAAK,CAAA;IACrC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,UAAU,CAAA;IAC1C,MAAM,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;IAEzB,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAA;IAC/C,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IAEpB,WAAW,CAAC,MAAM,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;IAEvC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IACpB,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAA;IAE/B,cAAc;IACd,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,gBAAkB,CAAC,eAAiB,CAAC,CAAA;IAC7D,OAAO,MAAM,CAAC,KAAK,eAAW,CAAA;AAChC,CAAC,CAAA;AAOD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,mBAAe,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI
,EAAE,IAAI,CAAC,CAAC,CAAA;AAEtG,MAAM,OAAO,GAAG,CAAC,MAAiB,EAAU,EAAE;IAC5C,0CAA0C;IAC1C,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC/C,OAAO,YAAY,CAAA;KACpB;IAED,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,CAAC,CAAA;IAE7B,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,CAAC,GAAG,YAAY,GAAG,CAAC,GAAG,CAAC,CAAA;IACpC,yBAAyB;IACzB,MAAM,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IACxC,IAAI,CAAC,CAAC,CAAC,mBAAe,CAAA;IACtB,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IACzB,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9B,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA,CAAC,gCAAgC;IAC3D,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;IACzC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,MAAM,GAAG,CAAC,SAAiB,EAAE,SAAiB,EAAU,EAAE;IAC9D,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,CAAA;IACrC,MAAM,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;IAC1B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,CAAA;IACjC,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,EAAE,CAAC,CAAA;IAClC,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAOD,MAAM,cAAc,GAAG,CAAC,IAAU,EAAE,MAAc,EAAU,EAAE;IAC5D,MAAM,SAAS,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC3C,MAAM,GAAG,GAAG,CAAC,GAAG,SAAS,GAAG,CAAC,CAAA;IAC7B,yBAAyB;IACzB,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IAC1C,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAA;IAChB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IAC3B,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAChC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA,CAAC,yBAAyB;IACzC,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,mBAAe,CAAA;AACvE,MAAM,sBAAsB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,mBAAe,CAAA;AAE1E,MAAM,QAAQ,GAAG,CAAC,GAAe,EAAU,EAAE;IAC3C,OAAO,GAAG,CAAC,IAAI;QACb,CAAC,CAAC,cAAc,oBAAgB,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,EAAE,CAAC;QAC/D,CAAC,C
AAC,GAAG,CAAC,IAAI,KAAK,GAAG;YAClB,CAAC,CAAC,mBAAmB;YACrB,CAAC,CAAC,sBAAsB,CAAA;AAC5B,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,GAAe,EAAU,EAAE;IACxC,MAAM,IAAI,GAAG,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,EAAE,CAAA;IAC3C,OAAO,cAAc,iBAAa,IAAI,CAAC,CAAA;AACzC,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,KAAa,EAAU,EAAE;IACzC,OAAO,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,yBAAoB,CAAA;AACpD,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,OAAe,EAAU,EAAE;IAC3C,OAAO,cAAc,qBAAgB,OAAO,CAAC,CAAA;AAC/C,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,CAAC,IAAU,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAE1F,MAAM,WAAW,GAAG,cAAc,gBAAY,CAAA;AAC9C,MAAM,UAAU,GAAG,cAAc,eAAW,CAAA;AAC5C,MAAM,SAAS,GAAG,cAAc,cAAU,CAAA;AAC1C,MAAM,cAAc,GAAG,cAAc,mBAAe,CAAA;AAEpD,MAAM,SAAS,GAAG;IAChB,OAAO;IACP,QAAQ;IACR,UAAU;IACV,8BAA8B;IAC9B,2BAA2B;IAC3B,KAAK;IACL,KAAK;IACL,IAAI;IACJ,OAAO;IACP,QAAQ;IACR,KAAK;IACL,KAAK,EAAE,GAAG,EAAE,CAAC,WAAW;IACxB,IAAI,EAAE,GAAG,EAAE,CAAC,UAAU;IACtB,GAAG,EAAE,GAAG,EAAE,CAAC,SAAS;IACpB,QAAQ;IACR,QAAQ,EAAE,GAAG,EAAE,CAAC,cAAc;IAC9B,QAAQ;IACR,MAAM;CACP,CAAA;AAEQ,8BAAS"} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/package.json b/reverse_engineering/node_modules/pg-protocol/package.json new file mode 100644 index 0000000..3a7ce52 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/package.json @@ -0,0 +1,64 @@ +{ + "_from": "pg-protocol@^1.5.0", + "_id": "pg-protocol@1.5.0", + "_inBundle": false, + "_integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==", + "_location": "/pg-protocol", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-protocol@^1.5.0", + "name": "pg-protocol", + "escapedName": "pg-protocol", + "rawSpec": "^1.5.0", + "saveSpec": null, + "fetchSpec": "^1.5.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "_shasum": 
"b5dd452257314565e2d54ab3c132adc46565a6a0", + "_spec": "pg-protocol@^1.5.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", + "bugs": { + "url": "https://github.com/brianc/node-postgres/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The postgres client/server binary protocol, implemented in TypeScript", + "devDependencies": { + "@types/chai": "^4.2.7", + "@types/mocha": "^5.2.7", + "@types/node": "^12.12.21", + "chai": "^4.2.0", + "chunky": "^0.0.0", + "mocha": "^7.1.2", + "ts-node": "^8.5.4", + "typescript": "^4.0.3" + }, + "files": [ + "/dist/*{js,ts,map}", + "/src" + ], + "gitHead": "d45947938263bec30a1e3252452f04177b785f66", + "homepage": "https://github.com/brianc/node-postgres#readme", + "license": "MIT", + "main": "dist/index.js", + "name": "pg-protocol", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-protocol" + }, + "scripts": { + "build": "tsc", + "build:watch": "tsc --watch", + "prepublish": "yarn build", + "pretest": "yarn build", + "test": "mocha dist/**/*.test.js" + }, + "types": "dist/index.d.ts", + "version": "1.5.0" +} diff --git a/reverse_engineering/node_modules/pg-protocol/src/b.ts b/reverse_engineering/node_modules/pg-protocol/src/b.ts new file mode 100644 index 0000000..028b763 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/b.ts @@ -0,0 +1,28 @@ +// file for microbenchmarking + +import { Writer } from './buffer-writer' +import { serialize } from './index' +import { BufferReader } from './buffer-reader' + +const LOOPS = 1000 +let count = 0 +let start = Date.now() +const writer = new Writer() + +const reader = new BufferReader() +const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]) + +const run = () => { + if (count > LOOPS) { + console.log(Date.now() - start) + return + } + count++ + for (let i = 0; i < LOOPS; i++) { + reader.setBuffer(0, buffer) + 
reader.cstring() + } + setImmediate(run) +} + +run() diff --git a/reverse_engineering/node_modules/pg-protocol/src/buffer-reader.ts b/reverse_engineering/node_modules/pg-protocol/src/buffer-reader.ts new file mode 100644 index 0000000..2305e13 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/buffer-reader.ts @@ -0,0 +1,53 @@ +const emptyBuffer = Buffer.allocUnsafe(0) + +export class BufferReader { + private buffer: Buffer = emptyBuffer + + // TODO(bmc): support non-utf8 encoding? + private encoding: string = 'utf-8' + + constructor(private offset: number = 0) {} + + public setBuffer(offset: number, buffer: Buffer): void { + this.offset = offset + this.buffer = buffer + } + + public int16(): number { + const result = this.buffer.readInt16BE(this.offset) + this.offset += 2 + return result + } + + public byte(): number { + const result = this.buffer[this.offset] + this.offset++ + return result + } + + public int32(): number { + const result = this.buffer.readInt32BE(this.offset) + this.offset += 4 + return result + } + + public string(length: number): string { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length) + this.offset += length + return result + } + + public cstring(): string { + const start = this.offset + let end = start + while (this.buffer[end++] !== 0) {} + this.offset = end + return this.buffer.toString(this.encoding, start, end - 1) + } + + public bytes(length: number): Buffer { + const result = this.buffer.slice(this.offset, this.offset + length) + this.offset += length + return result + } +} diff --git a/reverse_engineering/node_modules/pg-protocol/src/buffer-writer.ts b/reverse_engineering/node_modules/pg-protocol/src/buffer-writer.ts new file mode 100644 index 0000000..756cdc9 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/buffer-writer.ts @@ -0,0 +1,85 @@ +//binary data writer tuned for encoding binary specific to the postgres binary protocol + +export class Writer { + 
private buffer: Buffer + private offset: number = 5 + private headerPosition: number = 0 + constructor(private size = 256) { + this.buffer = Buffer.allocUnsafe(size) + } + + private ensure(size: number): void { + var remaining = this.buffer.length - this.offset + if (remaining < size) { + var oldBuffer = this.buffer + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size + this.buffer = Buffer.allocUnsafe(newSize) + oldBuffer.copy(this.buffer) + } + } + + public addInt32(num: number): Writer { + this.ensure(4) + this.buffer[this.offset++] = (num >>> 24) & 0xff + this.buffer[this.offset++] = (num >>> 16) & 0xff + this.buffer[this.offset++] = (num >>> 8) & 0xff + this.buffer[this.offset++] = (num >>> 0) & 0xff + return this + } + + public addInt16(num: number): Writer { + this.ensure(2) + this.buffer[this.offset++] = (num >>> 8) & 0xff + this.buffer[this.offset++] = (num >>> 0) & 0xff + return this + } + + public addCString(string: string): Writer { + if (!string) { + this.ensure(1) + } else { + var len = Buffer.byteLength(string) + this.ensure(len + 1) // +1 for null terminator + this.buffer.write(string, this.offset, 'utf-8') + this.offset += len + } + + this.buffer[this.offset++] = 0 // null terminator + return this + } + + public addString(string: string = ''): Writer { + var len = Buffer.byteLength(string) + this.ensure(len) + this.buffer.write(string, this.offset) + this.offset += len + return this + } + + public add(otherBuffer: Buffer): Writer { + this.ensure(otherBuffer.length) + otherBuffer.copy(this.buffer, this.offset) + this.offset += otherBuffer.length + return this + } + + private join(code?: number): Buffer { + if (code) { + this.buffer[this.headerPosition] = code + //length is everything in this packet minus the code + const length = this.offset - (this.headerPosition + 1) + this.buffer.writeInt32BE(length, 
this.headerPosition + 1) + } + return this.buffer.slice(code ? 0 : 5, this.offset) + } + + public flush(code?: number): Buffer { + var result = this.join(code) + this.offset = 5 + this.headerPosition = 0 + this.buffer = Buffer.allocUnsafe(this.size) + return result + } +} diff --git a/reverse_engineering/node_modules/pg-protocol/src/inbound-parser.test.ts b/reverse_engineering/node_modules/pg-protocol/src/inbound-parser.test.ts new file mode 100644 index 0000000..364bd8d --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/inbound-parser.test.ts @@ -0,0 +1,557 @@ +import buffers from './testing/test-buffers' +import BufferList from './testing/buffer-list' +import { parse } from '.' +import assert from 'assert' +import { PassThrough } from 'stream' +import { BackendMessage } from './messages' + +var authOkBuffer = buffers.authenticationOk() +var paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8') +var readyForQueryBuffer = buffers.readyForQuery() +var backendKeyDataBuffer = buffers.backendKeyData(1, 2) +var commandCompleteBuffer = buffers.commandComplete('SELECT 3') +var parseCompleteBuffer = buffers.parseComplete() +var bindCompleteBuffer = buffers.bindComplete() +var portalSuspendedBuffer = buffers.portalSuspended() + +var addRow = function (bufferList: BufferList, name: string, offset: number) { + return bufferList + .addCString(name) // field name + .addInt32(offset++) // table id + .addInt16(offset++) // attribute of column number + .addInt32(offset++) // objectId of field's data type + .addInt16(offset++) // datatype size + .addInt32(offset++) // type modifier + .addInt16(0) // format code, 0 => text +} + +var row1 = { + name: 'id', + tableID: 1, + attributeNumber: 2, + dataTypeID: 3, + dataTypeSize: 4, + typeModifier: 5, + formatCode: 0, +} +var oneRowDescBuff = buffers.rowDescription([row1]) +row1.name = 'bang' + +var twoRowBuf = buffers.rowDescription([ + row1, + { + name: 'whoah', + tableID: 10, + attributeNumber: 11, + 
dataTypeID: 12, + dataTypeSize: 13, + typeModifier: 14, + formatCode: 0, + }, +]) + +var emptyRowFieldBuf = new BufferList().addInt16(0).join(true, 'D') + +var emptyRowFieldBuf = buffers.dataRow([]) + +var oneFieldBuf = new BufferList() + .addInt16(1) // number of fields + .addInt32(5) // length of bytes of fields + .addCString('test') + .join(true, 'D') + +var oneFieldBuf = buffers.dataRow(['test']) + +var expectedAuthenticationOkayMessage = { + name: 'authenticationOk', + length: 8, +} + +var expectedParameterStatusMessage = { + name: 'parameterStatus', + parameterName: 'client_encoding', + parameterValue: 'UTF8', + length: 25, +} + +var expectedBackendKeyDataMessage = { + name: 'backendKeyData', + processID: 1, + secretKey: 2, +} + +var expectedReadyForQueryMessage = { + name: 'readyForQuery', + length: 5, + status: 'I', +} + +var expectedCommandCompleteMessage = { + name: 'commandComplete', + length: 13, + text: 'SELECT 3', +} +var emptyRowDescriptionBuffer = new BufferList() + .addInt16(0) // number of fields + .join(true, 'T') + +var expectedEmptyRowDescriptionMessage = { + name: 'rowDescription', + length: 6, + fieldCount: 0, + fields: [], +} +var expectedOneRowMessage = { + name: 'rowDescription', + length: 27, + fieldCount: 1, + fields: [ + { + name: 'id', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + ], +} + +var expectedTwoRowMessage = { + name: 'rowDescription', + length: 53, + fieldCount: 2, + fields: [ + { + name: 'bang', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + { + name: 'whoah', + tableID: 10, + columnID: 11, + dataTypeID: 12, + dataTypeSize: 13, + dataTypeModifier: 14, + format: 'text', + }, + ], +} + +var emptyParameterDescriptionBuffer = new BufferList() + .addInt16(0) // number of parameters + .join(true, 't') + +var oneParameterDescBuf = buffers.parameterDescription([1111]) + +var twoParameterDescBuf = 
buffers.parameterDescription([2222, 3333]) + +var expectedEmptyParameterDescriptionMessage = { + name: 'parameterDescription', + length: 6, + parameterCount: 0, + dataTypeIDs: [], +} + +var expectedOneParameterMessage = { + name: 'parameterDescription', + length: 10, + parameterCount: 1, + dataTypeIDs: [1111], +} + +var expectedTwoParameterMessage = { + name: 'parameterDescription', + length: 14, + parameterCount: 2, + dataTypeIDs: [2222, 3333], +} + +var testForMessage = function (buffer: Buffer, expectedMessage: any) { + it('recieves and parses ' + expectedMessage.name, async () => { + const messages = await parseBuffers([buffer]) + const [lastMessage] = messages + + for (const key in expectedMessage) { + assert.deepEqual((lastMessage as any)[key], expectedMessage[key]) + } + }) +} + +var plainPasswordBuffer = buffers.authenticationCleartextPassword() +var md5PasswordBuffer = buffers.authenticationMD5Password() +var SASLBuffer = buffers.authenticationSASL() +var SASLContinueBuffer = buffers.authenticationSASLContinue() +var SASLFinalBuffer = buffers.authenticationSASLFinal() + +var expectedPlainPasswordMessage = { + name: 'authenticationCleartextPassword', +} + +var expectedMD5PasswordMessage = { + name: 'authenticationMD5Password', + salt: Buffer.from([1, 2, 3, 4]), +} + +var expectedSASLMessage = { + name: 'authenticationSASL', + mechanisms: ['SCRAM-SHA-256'], +} + +var expectedSASLContinueMessage = { + name: 'authenticationSASLContinue', + data: 'data', +} + +var expectedSASLFinalMessage = { + name: 'authenticationSASLFinal', + data: 'data', +} + +var notificationResponseBuffer = buffers.notification(4, 'hi', 'boom') +var expectedNotificationResponseMessage = { + name: 'notification', + processId: 4, + channel: 'hi', + payload: 'boom', +} + +const parseBuffers = async (buffers: Buffer[]): Promise => { + const stream = new PassThrough() + for (const buffer of buffers) { + stream.write(buffer) + } + stream.end() + const msgs: BackendMessage[] = [] + await 
parse(stream, (msg) => msgs.push(msg)) + return msgs +} + +describe('PgPacketStream', function () { + testForMessage(authOkBuffer, expectedAuthenticationOkayMessage) + testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage) + testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage) + testForMessage(SASLBuffer, expectedSASLMessage) + testForMessage(SASLContinueBuffer, expectedSASLContinueMessage) + + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]) + testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage) + + testForMessage(SASLFinalBuffer, expectedSASLFinalMessage) + + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]) + testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage) + + testForMessage(paramStatusBuffer, expectedParameterStatusMessage) + testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage) + testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage) + testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage) + testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage) + testForMessage(buffers.emptyQuery(), { + name: 'emptyQuery', + length: 4, + }) + + testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), { + name: 'noData', + }) + + describe('rowDescription messages', function () { + testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage) + testForMessage(oneRowDescBuff, expectedOneRowMessage) + testForMessage(twoRowBuf, 
expectedTwoRowMessage) + }) + + describe('parameterDescription messages', function () { + testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage) + testForMessage(oneParameterDescBuf, expectedOneParameterMessage) + testForMessage(twoParameterDescBuf, expectedTwoParameterMessage) + }) + + describe('parsing rows', function () { + describe('parsing empty row', function () { + testForMessage(emptyRowFieldBuf, { + name: 'dataRow', + fieldCount: 0, + }) + }) + + describe('parsing data row with fields', function () { + testForMessage(oneFieldBuf, { + name: 'dataRow', + fieldCount: 1, + fields: ['test'], + }) + }) + }) + + describe('notice message', function () { + // this uses the same logic as error message + var buff = buffers.notice([{ type: 'C', value: 'code' }]) + testForMessage(buff, { + name: 'notice', + code: 'code', + }) + }) + + testForMessage(buffers.error([]), { + name: 'error', + }) + + describe('with all the fields', function () { + var buffer = buffers.error([ + { + type: 'S', + value: 'ERROR', + }, + { + type: 'C', + value: 'code', + }, + { + type: 'M', + value: 'message', + }, + { + type: 'D', + value: 'details', + }, + { + type: 'H', + value: 'hint', + }, + { + type: 'P', + value: '100', + }, + { + type: 'p', + value: '101', + }, + { + type: 'q', + value: 'query', + }, + { + type: 'W', + value: 'where', + }, + { + type: 'F', + value: 'file', + }, + { + type: 'L', + value: 'line', + }, + { + type: 'R', + value: 'routine', + }, + { + type: 'Z', // ignored + value: 'alsdkf', + }, + ]) + + testForMessage(buffer, { + name: 'error', + severity: 'ERROR', + code: 'code', + message: 'message', + detail: 'details', + hint: 'hint', + position: '100', + internalPosition: '101', + internalQuery: 'query', + where: 'where', + file: 'file', + line: 'line', + routine: 'routine', + }) + }) + + testForMessage(parseCompleteBuffer, { + name: 'parseComplete', + }) + + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }) + + 
testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }) + + testForMessage(buffers.closeComplete(), { + name: 'closeComplete', + }) + + describe('parses portal suspended message', function () { + testForMessage(portalSuspendedBuffer, { + name: 'portalSuspended', + }) + }) + + describe('parses replication start message', function () { + testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), { + name: 'replicationStart', + length: 4, + }) + }) + + describe('copy', () => { + testForMessage(buffers.copyIn(0), { + name: 'copyInResponse', + length: 7, + binary: false, + columnTypes: [], + }) + + testForMessage(buffers.copyIn(2), { + name: 'copyInResponse', + length: 11, + binary: false, + columnTypes: [0, 1], + }) + + testForMessage(buffers.copyOut(0), { + name: 'copyOutResponse', + length: 7, + binary: false, + columnTypes: [], + }) + + testForMessage(buffers.copyOut(3), { + name: 'copyOutResponse', + length: 13, + binary: false, + columnTypes: [0, 1, 2], + }) + + testForMessage(buffers.copyDone(), { + name: 'copyDone', + length: 4, + }) + + testForMessage(buffers.copyData(Buffer.from([5, 6, 7])), { + name: 'copyData', + length: 7, + chunk: Buffer.from([5, 6, 7]), + }) + }) + + // since the data message on a stream can randomly divide the incomming + // tcp packets anywhere, we need to make sure we can parse every single + // split on a tcp message + describe('split buffer, single message parsing', function () { + var fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!']) + + it('parses when full buffer comes in', async function () { + const messages = await parseBuffers([fullBuffer]) + const message = messages[0] as any + assert.equal(message.fields.length, 5) + assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + }) + + var testMessageRecievedAfterSpiltAt = async function (split: number) { + 
var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const messages = await parseBuffers([fullBuffer]) + const message = messages[0] as any + assert.equal(message.fields.length, 5) + assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + } + + it('parses when split in the middle', function () { + testMessageRecievedAfterSpiltAt(6) + }) + + it('parses when split at end', function () { + testMessageRecievedAfterSpiltAt(2) + }) + + it('parses when split at beginning', function () { + testMessageRecievedAfterSpiltAt(fullBuffer.length - 2) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 1) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 5) + }) + }) + + describe('split buffer, multiple message parsing', function () { + var dataRowBuffer = buffers.dataRow(['!']) + var readyForQueryBuffer = buffers.readyForQuery() + var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length) + dataRowBuffer.copy(fullBuffer, 0, 0) + readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0) + + var verifyMessages = function (messages: any[]) { + assert.strictEqual(messages.length, 2) + assert.deepEqual(messages[0], { + name: 'dataRow', + fieldCount: 1, + length: 11, + fields: ['!'], + }) + assert.equal(messages[0].fields[0], '!') + assert.deepEqual(messages[1], { + name: 'readyForQuery', + length: 5, + status: 'I', + }) + } + // sanity check + it('recieves both messages when packet is not split', async function () { + const messages = await parseBuffers([fullBuffer]) + verifyMessages(messages) + }) + + var splitAndVerifyTwoMessages = async function (split: number) { + var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var 
secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const messages = await parseBuffers([firstBuffer, secondBuffer]) + verifyMessages(messages) + } + + describe('recieves both messages when packet is split', function () { + it('in the middle', function () { + return splitAndVerifyTwoMessages(11) + }) + it('at the front', function () { + return Promise.all([ + splitAndVerifyTwoMessages(fullBuffer.length - 1), + splitAndVerifyTwoMessages(fullBuffer.length - 4), + splitAndVerifyTwoMessages(fullBuffer.length - 6), + ]) + }) + + it('at the end', function () { + return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]) + }) + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-protocol/src/index.ts b/reverse_engineering/node_modules/pg-protocol/src/index.ts new file mode 100644 index 0000000..00491ff --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/index.ts @@ -0,0 +1,11 @@ +import { BackendMessage, DatabaseError } from './messages' +import { serialize } from './serializer' +import { Parser, MessageCallback } from './parser' + +export function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise { + const parser = new Parser() + stream.on('data', (buffer: Buffer) => parser.parse(buffer, callback)) + return new Promise((resolve) => stream.on('end', () => resolve())) +} + +export { serialize, DatabaseError } diff --git a/reverse_engineering/node_modules/pg-protocol/src/messages.ts b/reverse_engineering/node_modules/pg-protocol/src/messages.ts new file mode 100644 index 0000000..7eab845 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/messages.ts @@ -0,0 +1,230 @@ +export type Mode = 'text' | 'binary' + +export type MessageName = + | 'parseComplete' + | 'bindComplete' + | 'closeComplete' + | 'noData' + | 'portalSuspended' + | 'replicationStart' + | 'emptyQuery' + | 'copyDone' 
+ | 'copyData' + | 'rowDescription' + | 'parameterDescription' + | 'parameterStatus' + | 'backendKeyData' + | 'notification' + | 'readyForQuery' + | 'commandComplete' + | 'dataRow' + | 'copyInResponse' + | 'copyOutResponse' + | 'authenticationOk' + | 'authenticationMD5Password' + | 'authenticationCleartextPassword' + | 'authenticationSASL' + | 'authenticationSASLContinue' + | 'authenticationSASLFinal' + | 'error' + | 'notice' + +export interface BackendMessage { + name: MessageName + length: number +} + +export const parseComplete: BackendMessage = { + name: 'parseComplete', + length: 5, +} + +export const bindComplete: BackendMessage = { + name: 'bindComplete', + length: 5, +} + +export const closeComplete: BackendMessage = { + name: 'closeComplete', + length: 5, +} + +export const noData: BackendMessage = { + name: 'noData', + length: 5, +} + +export const portalSuspended: BackendMessage = { + name: 'portalSuspended', + length: 5, +} + +export const replicationStart: BackendMessage = { + name: 'replicationStart', + length: 4, +} + +export const emptyQuery: BackendMessage = { + name: 'emptyQuery', + length: 4, +} + +export const copyDone: BackendMessage = { + name: 'copyDone', + length: 4, +} + +interface NoticeOrError { + message: string | undefined + severity: string | undefined + code: string | undefined + detail: string | undefined + hint: string | undefined + position: string | undefined + internalPosition: string | undefined + internalQuery: string | undefined + where: string | undefined + schema: string | undefined + table: string | undefined + column: string | undefined + dataType: string | undefined + constraint: string | undefined + file: string | undefined + line: string | undefined + routine: string | undefined +} + +export class DatabaseError extends Error implements NoticeOrError { + public severity: string | undefined + public code: string | undefined + public detail: string | undefined + public hint: string | undefined + public position: string | 
undefined + public internalPosition: string | undefined + public internalQuery: string | undefined + public where: string | undefined + public schema: string | undefined + public table: string | undefined + public column: string | undefined + public dataType: string | undefined + public constraint: string | undefined + public file: string | undefined + public line: string | undefined + public routine: string | undefined + constructor(message: string, public readonly length: number, public readonly name: MessageName) { + super(message) + } +} + +export class CopyDataMessage { + public readonly name = 'copyData' + constructor(public readonly length: number, public readonly chunk: Buffer) {} +} + +export class CopyResponse { + public readonly columnTypes: number[] + constructor( + public readonly length: number, + public readonly name: MessageName, + public readonly binary: boolean, + columnCount: number + ) { + this.columnTypes = new Array(columnCount) + } +} + +export class Field { + constructor( + public readonly name: string, + public readonly tableID: number, + public readonly columnID: number, + public readonly dataTypeID: number, + public readonly dataTypeSize: number, + public readonly dataTypeModifier: number, + public readonly format: Mode + ) {} +} + +export class RowDescriptionMessage { + public readonly name: MessageName = 'rowDescription' + public readonly fields: Field[] + constructor(public readonly length: number, public readonly fieldCount: number) { + this.fields = new Array(this.fieldCount) + } +} + +export class ParameterDescriptionMessage { + public readonly name: MessageName = 'parameterDescription' + public readonly dataTypeIDs: number[] + constructor(public readonly length: number, public readonly parameterCount: number) { + this.dataTypeIDs = new Array(this.parameterCount) + } +} + +export class ParameterStatusMessage { + public readonly name: MessageName = 'parameterStatus' + constructor( + public readonly length: number, + public readonly 
parameterName: string, + public readonly parameterValue: string + ) {} +} + +export class AuthenticationMD5Password implements BackendMessage { + public readonly name: MessageName = 'authenticationMD5Password' + constructor(public readonly length: number, public readonly salt: Buffer) {} +} + +export class BackendKeyDataMessage { + public readonly name: MessageName = 'backendKeyData' + constructor(public readonly length: number, public readonly processID: number, public readonly secretKey: number) {} +} + +export class NotificationResponseMessage { + public readonly name: MessageName = 'notification' + constructor( + public readonly length: number, + public readonly processId: number, + public readonly channel: string, + public readonly payload: string + ) {} +} + +export class ReadyForQueryMessage { + public readonly name: MessageName = 'readyForQuery' + constructor(public readonly length: number, public readonly status: string) {} +} + +export class CommandCompleteMessage { + public readonly name: MessageName = 'commandComplete' + constructor(public readonly length: number, public readonly text: string) {} +} + +export class DataRowMessage { + public readonly fieldCount: number + public readonly name: MessageName = 'dataRow' + constructor(public length: number, public fields: any[]) { + this.fieldCount = fields.length + } +} + +export class NoticeMessage implements BackendMessage, NoticeOrError { + constructor(public readonly length: number, public readonly message: string | undefined) {} + public readonly name = 'notice' + public severity: string | undefined + public code: string | undefined + public detail: string | undefined + public hint: string | undefined + public position: string | undefined + public internalPosition: string | undefined + public internalQuery: string | undefined + public where: string | undefined + public schema: string | undefined + public table: string | undefined + public column: string | undefined + public dataType: string | undefined 
+ public constraint: string | undefined + public file: string | undefined + public line: string | undefined + public routine: string | undefined +} diff --git a/reverse_engineering/node_modules/pg-protocol/src/outbound-serializer.test.ts b/reverse_engineering/node_modules/pg-protocol/src/outbound-serializer.test.ts new file mode 100644 index 0000000..f6669be --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/outbound-serializer.test.ts @@ -0,0 +1,272 @@ +import assert from 'assert' +import { serialize } from './serializer' +import BufferList from './testing/buffer-list' + +describe('serializer', () => { + it('builds startup message', function () { + const actual = serialize.startup({ + user: 'brian', + database: 'bang', + }) + assert.deepEqual( + actual, + new BufferList() + .addInt16(3) + .addInt16(0) + .addCString('user') + .addCString('brian') + .addCString('database') + .addCString('bang') + .addCString('client_encoding') + .addCString('UTF8') + .addCString('') + .join(true) + ) + }) + + it('builds password message', function () { + const actual = serialize.password('!') + assert.deepEqual(actual, new BufferList().addCString('!').join(true, 'p')) + }) + + it('builds request ssl message', function () { + const actual = serialize.requestSsl() + const expected = new BufferList().addInt32(80877103).join(true) + assert.deepEqual(actual, expected) + }) + + it('builds SASLInitialResponseMessage message', function () { + const actual = serialize.sendSASLInitialResponseMessage('mech', 'data') + assert.deepEqual(actual, new BufferList().addCString('mech').addInt32(4).addString('data').join(true, 'p')) + }) + + it('builds SCRAMClientFinalMessage message', function () { + const actual = serialize.sendSCRAMClientFinalMessage('data') + assert.deepEqual(actual, new BufferList().addString('data').join(true, 'p')) + }) + + it('builds query message', function () { + var txt = 'select * from boom' + const actual = serialize.query(txt) + assert.deepEqual(actual, 
new BufferList().addCString(txt).join(true, 'Q')) + }) + + describe('parse message', () => { + it('builds parse message', function () { + const actual = serialize.parse({ text: '!' }) + var expected = new BufferList().addCString('').addCString('!').addInt16(0).join(true, 'P') + assert.deepEqual(actual, expected) + }) + + it('builds parse message with named query', function () { + const actual = serialize.parse({ + name: 'boom', + text: 'select * from boom', + types: [], + }) + var expected = new BufferList().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P') + assert.deepEqual(actual, expected) + }) + + it('with multiple parameters', function () { + const actual = serialize.parse({ + name: 'force', + text: 'select * from bang where name = $1', + types: [1, 2, 3, 4], + }) + var expected = new BufferList() + .addCString('force') + .addCString('select * from bang where name = $1') + .addInt16(4) + .addInt32(1) + .addInt32(2) + .addInt32(3) + .addInt32(4) + .join(true, 'P') + assert.deepEqual(actual, expected) + }) + }) + + describe('bind messages', function () { + it('with no values', function () { + const actual = serialize.bind() + + var expectedBuffer = new BufferList() + .addCString('') + .addCString('') + .addInt16(0) + .addInt16(0) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + it('with named statement, portal, and values', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing')) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + }) + + it('with custom 
valueMapper', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + valueMapper: () => null, + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(0) + .addInt16(4) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt32(-1) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + it('with named statement, portal, and buffer value', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, Buffer.from('zing', 'utf8')], + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) // value count + .addInt16(0) // string + .addInt16(0) // string + .addInt16(0) // string + .addInt16(1) // binary + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing', 'utf-8')) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + describe('builds execute message', function () { + it('for unamed portal with no row limit', function () { + const actual = serialize.execute() + var expectedBuffer = new BufferList().addCString('').addInt32(0).join(true, 'E') + assert.deepEqual(actual, expectedBuffer) + }) + + it('for named portal with row limit', function () { + const actual = serialize.execute({ + portal: 'my favorite portal', + rows: 100, + }) + var expectedBuffer = new BufferList().addCString('my favorite portal').addInt32(100).join(true, 'E') + assert.deepEqual(actual, expectedBuffer) + }) + }) + + it('builds flush command', function () { + const actual = serialize.flush() + var expected = new BufferList().join(true, 'H') + assert.deepEqual(actual, expected) + }) + + it('builds sync command', 
function () { + const actual = serialize.sync() + var expected = new BufferList().join(true, 'S') + assert.deepEqual(actual, expected) + }) + + it('builds end command', function () { + const actual = serialize.end() + var expected = Buffer.from([0x58, 0, 0, 0, 4]) + assert.deepEqual(actual, expected) + }) + + describe('builds describe command', function () { + it('describe statement', function () { + const actual = serialize.describe({ type: 'S', name: 'bang' }) + var expected = new BufferList().addChar('S').addCString('bang').join(true, 'D') + assert.deepEqual(actual, expected) + }) + + it('describe unnamed portal', function () { + const actual = serialize.describe({ type: 'P' }) + var expected = new BufferList().addChar('P').addCString('').join(true, 'D') + assert.deepEqual(actual, expected) + }) + }) + + describe('builds close command', function () { + it('describe statement', function () { + const actual = serialize.close({ type: 'S', name: 'bang' }) + var expected = new BufferList().addChar('S').addCString('bang').join(true, 'C') + assert.deepEqual(actual, expected) + }) + + it('describe unnamed portal', function () { + const actual = serialize.close({ type: 'P' }) + var expected = new BufferList().addChar('P').addCString('').join(true, 'C') + assert.deepEqual(actual, expected) + }) + }) + + describe('copy messages', function () { + it('builds copyFromChunk', () => { + const actual = serialize.copyData(Buffer.from([1, 2, 3])) + const expected = new BufferList().add(Buffer.from([1, 2, 3])).join(true, 'd') + assert.deepEqual(actual, expected) + }) + + it('builds copy fail', () => { + const actual = serialize.copyFail('err!') + const expected = new BufferList().addCString('err!').join(true, 'f') + assert.deepEqual(actual, expected) + }) + + it('builds copy done', () => { + const actual = serialize.copyDone() + const expected = new BufferList().join(true, 'c') + assert.deepEqual(actual, expected) + }) + }) + + it('builds cancel message', () => { + const actual = 
serialize.cancel(3, 4) + const expected = new BufferList().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true) + assert.deepEqual(actual, expected) + }) +}) diff --git a/reverse_engineering/node_modules/pg-protocol/src/parser.ts b/reverse_engineering/node_modules/pg-protocol/src/parser.ts new file mode 100644 index 0000000..f900193 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/parser.ts @@ -0,0 +1,389 @@ +import { TransformOptions } from 'stream' +import { + Mode, + bindComplete, + parseComplete, + closeComplete, + noData, + portalSuspended, + copyDone, + replicationStart, + emptyQuery, + ReadyForQueryMessage, + CommandCompleteMessage, + CopyDataMessage, + CopyResponse, + NotificationResponseMessage, + RowDescriptionMessage, + ParameterDescriptionMessage, + Field, + DataRowMessage, + ParameterStatusMessage, + BackendKeyDataMessage, + DatabaseError, + BackendMessage, + MessageName, + AuthenticationMD5Password, + NoticeMessage, +} from './messages' +import { BufferReader } from './buffer-reader' +import assert from 'assert' + +// every message is prefixed with a single bye +const CODE_LENGTH = 1 +// every message has an int32 length which includes itself but does +// NOT include the code in the length +const LEN_LENGTH = 4 + +const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH + +export type Packet = { + code: number + packet: Buffer +} + +const emptyBuffer = Buffer.allocUnsafe(0) + +type StreamOptions = TransformOptions & { + mode: Mode +} + +const enum MessageCodes { + DataRow = 0x44, // D + ParseComplete = 0x31, // 1 + BindComplete = 0x32, // 2 + CloseComplete = 0x33, // 3 + CommandComplete = 0x43, // C + ReadyForQuery = 0x5a, // Z + NoData = 0x6e, // n + NotificationResponse = 0x41, // A + AuthenticationResponse = 0x52, // R + ParameterStatus = 0x53, // S + BackendKeyData = 0x4b, // K + ErrorMessage = 0x45, // E + NoticeMessage = 0x4e, // N + RowDescriptionMessage = 0x54, // T + ParameterDescriptionMessage = 0x74, // t + 
PortalSuspended = 0x73, // s + ReplicationStart = 0x57, // W + EmptyQuery = 0x49, // I + CopyIn = 0x47, // G + CopyOut = 0x48, // H + CopyDone = 0x63, // c + CopyData = 0x64, // d +} + +export type MessageCallback = (msg: BackendMessage) => void + +export class Parser { + private buffer: Buffer = emptyBuffer + private bufferLength: number = 0 + private bufferOffset: number = 0 + private reader = new BufferReader() + private mode: Mode + + constructor(opts?: StreamOptions) { + if (opts?.mode === 'binary') { + throw new Error('Binary mode not supported yet') + } + this.mode = opts?.mode || 'text' + } + + public parse(buffer: Buffer, callback: MessageCallback) { + this.mergeBuffer(buffer) + const bufferFullLength = this.bufferOffset + this.bufferLength + let offset = this.bufferOffset + while (offset + HEADER_LENGTH <= bufferFullLength) { + // code is 1 byte long - it identifies the message type + const code = this.buffer[offset] + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = this.buffer.readUInt32BE(offset + CODE_LENGTH) + const fullMessageLength = CODE_LENGTH + length + if (fullMessageLength + offset <= bufferFullLength) { + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer) + callback(message) + offset += fullMessageLength + } else { + break + } + } + if (offset === bufferFullLength) { + // No more use for the buffer + this.buffer = emptyBuffer + this.bufferLength = 0 + this.bufferOffset = 0 + } else { + // Adjust the cursors of remainingBuffer + this.bufferLength = bufferFullLength - offset + this.bufferOffset = offset + } + } + + private mergeBuffer(buffer: Buffer): void { + if (this.bufferLength > 0) { + const newLength = this.bufferLength + buffer.byteLength + const newFullLength = newLength + this.bufferOffset + if (newFullLength > this.buffer.byteLength) { + // We can't concat the new buffer with the remaining one + let newBuffer: Buffer + if (newLength <= 
this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { + // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer + newBuffer = this.buffer + } else { + // Allocate a new larger buffer + let newBufferLength = this.buffer.byteLength * 2 + while (newLength >= newBufferLength) { + newBufferLength *= 2 + } + newBuffer = Buffer.allocUnsafe(newBufferLength) + } + // Move the remaining buffer to the new one + this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength) + this.buffer = newBuffer + this.bufferOffset = 0 + } + // Concat the new buffer with the remaining one + buffer.copy(this.buffer, this.bufferOffset + this.bufferLength) + this.bufferLength = newLength + } else { + this.buffer = buffer + this.bufferOffset = 0 + this.bufferLength = buffer.byteLength + } + } + + private handlePacket(offset: number, code: number, length: number, bytes: Buffer): BackendMessage { + switch (code) { + case MessageCodes.BindComplete: + return bindComplete + case MessageCodes.ParseComplete: + return parseComplete + case MessageCodes.CloseComplete: + return closeComplete + case MessageCodes.NoData: + return noData + case MessageCodes.PortalSuspended: + return portalSuspended + case MessageCodes.CopyDone: + return copyDone + case MessageCodes.ReplicationStart: + return replicationStart + case MessageCodes.EmptyQuery: + return emptyQuery + case MessageCodes.DataRow: + return this.parseDataRowMessage(offset, length, bytes) + case MessageCodes.CommandComplete: + return this.parseCommandCompleteMessage(offset, length, bytes) + case MessageCodes.ReadyForQuery: + return this.parseReadyForQueryMessage(offset, length, bytes) + case MessageCodes.NotificationResponse: + return this.parseNotificationMessage(offset, length, bytes) + case MessageCodes.AuthenticationResponse: + return this.parseAuthenticationResponse(offset, length, bytes) + case MessageCodes.ParameterStatus: + return 
this.parseParameterStatusMessage(offset, length, bytes) + case MessageCodes.BackendKeyData: + return this.parseBackendKeyData(offset, length, bytes) + case MessageCodes.ErrorMessage: + return this.parseErrorMessage(offset, length, bytes, 'error') + case MessageCodes.NoticeMessage: + return this.parseErrorMessage(offset, length, bytes, 'notice') + case MessageCodes.RowDescriptionMessage: + return this.parseRowDescriptionMessage(offset, length, bytes) + case MessageCodes.ParameterDescriptionMessage: + return this.parseParameterDescriptionMessage(offset, length, bytes) + case MessageCodes.CopyIn: + return this.parseCopyInMessage(offset, length, bytes) + case MessageCodes.CopyOut: + return this.parseCopyOutMessage(offset, length, bytes) + case MessageCodes.CopyData: + return this.parseCopyData(offset, length, bytes) + default: + assert.fail(`unknown message code: ${code.toString(16)}`) + } + } + + private parseReadyForQueryMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const status = this.reader.string(1) + return new ReadyForQueryMessage(length, status) + } + + private parseCommandCompleteMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const text = this.reader.cstring() + return new CommandCompleteMessage(length, text) + } + + private parseCopyData(offset: number, length: number, bytes: Buffer) { + const chunk = bytes.slice(offset, offset + (length - 4)) + return new CopyDataMessage(length, chunk) + } + + private parseCopyInMessage(offset: number, length: number, bytes: Buffer) { + return this.parseCopyMessage(offset, length, bytes, 'copyInResponse') + } + + private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) { + return this.parseCopyMessage(offset, length, bytes, 'copyOutResponse') + } + + private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: MessageName) { + this.reader.setBuffer(offset, bytes) + const isBinary = 
this.reader.byte() !== 0 + const columnCount = this.reader.int16() + const message = new CopyResponse(length, messageName, isBinary, columnCount) + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16() + } + return message + } + + private parseNotificationMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const processId = this.reader.int32() + const channel = this.reader.cstring() + const payload = this.reader.cstring() + return new NotificationResponseMessage(length, processId, channel, payload) + } + + private parseRowDescriptionMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const fieldCount = this.reader.int16() + const message = new RowDescriptionMessage(length, fieldCount) + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField() + } + return message + } + + private parseField(): Field { + const name = this.reader.cstring() + const tableID = this.reader.int32() + const columnID = this.reader.int16() + const dataTypeID = this.reader.int32() + const dataTypeSize = this.reader.int16() + const dataTypeModifier = this.reader.int32() + const mode = this.reader.int16() === 0 ? 
'text' : 'binary' + return new Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode) + } + + private parseParameterDescriptionMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const parameterCount = this.reader.int16() + const message = new ParameterDescriptionMessage(length, parameterCount) + for (let i = 0; i < parameterCount; i++) { + message.dataTypeIDs[i] = this.reader.int32() + } + return message + } + + private parseDataRowMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const fieldCount = this.reader.int16() + const fields: any[] = new Array(fieldCount) + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32() + // a -1 for length means the value of the field is null + fields[i] = len === -1 ? null : this.reader.string(len) + } + return new DataRowMessage(length, fields) + } + + private parseParameterStatusMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const name = this.reader.cstring() + const value = this.reader.cstring() + return new ParameterStatusMessage(length, name, value) + } + + private parseBackendKeyData(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const processID = this.reader.int32() + const secretKey = this.reader.int32() + return new BackendKeyDataMessage(length, processID, secretKey) + } + + public parseAuthenticationResponse(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const code = this.reader.int32() + // TODO(bmc): maybe better types here + const message: BackendMessage & any = { + name: 'authenticationOk', + length, + } + + switch (code) { + case 0: // AuthenticationOk + break + case 3: // AuthenticationCleartextPassword + if (message.length === 8) { + message.name = 'authenticationCleartextPassword' + } + break + case 5: // AuthenticationMD5Password + if 
(message.length === 12) { + message.name = 'authenticationMD5Password' + const salt = this.reader.bytes(4) + return new AuthenticationMD5Password(length, salt) + } + break + case 10: // AuthenticationSASL + message.name = 'authenticationSASL' + message.mechanisms = [] + let mechanism: string + do { + mechanism = this.reader.cstring() + + if (mechanism) { + message.mechanisms.push(mechanism) + } + } while (mechanism) + break + case 11: // AuthenticationSASLContinue + message.name = 'authenticationSASLContinue' + message.data = this.reader.string(length - 8) + break + case 12: // AuthenticationSASLFinal + message.name = 'authenticationSASLFinal' + message.data = this.reader.string(length - 8) + break + default: + throw new Error('Unknown authenticationOk message type ' + code) + } + return message + } + + private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: MessageName) { + this.reader.setBuffer(offset, bytes) + const fields: Record = {} + let fieldType = this.reader.string(1) + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring() + fieldType = this.reader.string(1) + } + + const messageValue = fields.M + + const message = + name === 'notice' ? 
new NoticeMessage(length, messageValue) : new DatabaseError(messageValue, length, name) + + message.severity = fields.S + message.code = fields.C + message.detail = fields.D + message.hint = fields.H + message.position = fields.P + message.internalPosition = fields.p + message.internalQuery = fields.q + message.where = fields.W + message.schema = fields.s + message.table = fields.t + message.column = fields.c + message.dataType = fields.d + message.constraint = fields.n + message.file = fields.F + message.line = fields.L + message.routine = fields.R + return message + } +} diff --git a/reverse_engineering/node_modules/pg-protocol/src/serializer.ts b/reverse_engineering/node_modules/pg-protocol/src/serializer.ts new file mode 100644 index 0000000..07e2fe4 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/serializer.ts @@ -0,0 +1,274 @@ +import { Writer } from './buffer-writer' + +const enum code { + startup = 0x70, + query = 0x51, + parse = 0x50, + bind = 0x42, + execute = 0x45, + flush = 0x48, + sync = 0x53, + end = 0x58, + close = 0x43, + describe = 0x44, + copyFromChunk = 0x64, + copyDone = 0x63, + copyFail = 0x66, +} + +const writer = new Writer() + +const startup = (opts: Record): Buffer => { + // protocol version + writer.addInt16(3).addInt16(0) + for (const key of Object.keys(opts)) { + writer.addCString(key).addCString(opts[key]) + } + + writer.addCString('client_encoding').addCString('UTF8') + + var bodyBuffer = writer.addCString('').flush() + // this message is sent without a code + + var length = bodyBuffer.length + 4 + + return new Writer().addInt32(length).add(bodyBuffer).flush() +} + +const requestSsl = (): Buffer => { + const response = Buffer.allocUnsafe(8) + response.writeInt32BE(8, 0) + response.writeInt32BE(80877103, 4) + return response +} + +const password = (password: string): Buffer => { + return writer.addCString(password).flush(code.startup) +} + +const sendSASLInitialResponseMessage = function (mechanism: string, 
initialResponse: string): Buffer { + // 0x70 = 'p' + writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse) + + return writer.flush(code.startup) +} + +const sendSCRAMClientFinalMessage = function (additionalData: string): Buffer { + return writer.addString(additionalData).flush(code.startup) +} + +const query = (text: string): Buffer => { + return writer.addCString(text).flush(code.query) +} + +type ParseOpts = { + name?: string + types?: number[] + text: string +} + +const emptyArray: any[] = [] + +const parse = (query: ParseOpts): Buffer => { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + + // normalize missing query names to allow for null + const name = query.name || '' + if (name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', name, name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ + } + + const types = query.types || emptyArray + + var len = types.length + + var buffer = writer + .addCString(name) // name of query + .addCString(query.text) // actual query text + .addInt16(len) + + for (var i = 0; i < len; i++) { + buffer.addInt32(types[i]) + } + + return writer.flush(code.parse) +} + +type ValueMapper = (param: any, index: number) => any + +type BindOpts = { + portal?: string + binary?: boolean + statement?: string + values?: any[] + // optional map from JS value to postgres value per parameter + valueMapper?: ValueMapper +} + +const paramWriter = new Writer() + +// make this a const enum so typescript will inline the value +const enum ParamType { + STRING = 0, + BINARY = 1, +} + +const writeValues = function (values: any[], valueMapper?: ValueMapper): void { + for (let i = 0; i < values.length; i++) { + const mappedVal = valueMapper ? 
valueMapper(values[i], i) : values[i] + if (mappedVal == null) { + // add the param type (string) to the writer + writer.addInt16(ParamType.STRING) + // write -1 to the param writer to indicate null + paramWriter.addInt32(-1) + } else if (mappedVal instanceof Buffer) { + // add the param type (binary) to the writer + writer.addInt16(ParamType.BINARY) + // add the buffer to the param writer + paramWriter.addInt32(mappedVal.length) + paramWriter.add(mappedVal) + } else { + // add the param type (string) to the writer + writer.addInt16(ParamType.STRING) + paramWriter.addInt32(Buffer.byteLength(mappedVal)) + paramWriter.addString(mappedVal) + } + } +} + +const bind = (config: BindOpts = {}): Buffer => { + // normalize config + const portal = config.portal || '' + const statement = config.statement || '' + const binary = config.binary || false + const values = config.values || emptyArray + const len = values.length + + writer.addCString(portal).addCString(statement) + writer.addInt16(len) + + writeValues(values, config.valueMapper) + + writer.addInt16(len) + writer.add(paramWriter.flush()) + + // format code + writer.addInt16(binary ? 
ParamType.BINARY : ParamType.STRING) + return writer.flush(code.bind) +} + +type ExecOpts = { + portal?: string + rows?: number +} + +const emptyExecute = Buffer.from([code.execute, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]) + +const execute = (config?: ExecOpts): Buffer => { + // this is the happy path for most queries + if (!config || (!config.portal && !config.rows)) { + return emptyExecute + } + + const portal = config.portal || '' + const rows = config.rows || 0 + + const portalLength = Buffer.byteLength(portal) + const len = 4 + portalLength + 1 + 4 + // one extra bit for code + const buff = Buffer.allocUnsafe(1 + len) + buff[0] = code.execute + buff.writeInt32BE(len, 1) + buff.write(portal, 5, 'utf-8') + buff[portalLength + 5] = 0 // null terminate portal cString + buff.writeUInt32BE(rows, buff.length - 4) + return buff +} + +const cancel = (processID: number, secretKey: number): Buffer => { + const buffer = Buffer.allocUnsafe(16) + buffer.writeInt32BE(16, 0) + buffer.writeInt16BE(1234, 4) + buffer.writeInt16BE(5678, 6) + buffer.writeInt32BE(processID, 8) + buffer.writeInt32BE(secretKey, 12) + return buffer +} + +type PortalOpts = { + type: 'S' | 'P' + name?: string +} + +const cstringMessage = (code: code, string: string): Buffer => { + const stringLen = Buffer.byteLength(string) + const len = 4 + stringLen + 1 + // one extra bit for code + const buffer = Buffer.allocUnsafe(1 + len) + buffer[0] = code + buffer.writeInt32BE(len, 1) + buffer.write(string, 5, 'utf-8') + buffer[len] = 0 // null terminate cString + return buffer +} + +const emptyDescribePortal = writer.addCString('P').flush(code.describe) +const emptyDescribeStatement = writer.addCString('S').flush(code.describe) + +const describe = (msg: PortalOpts): Buffer => { + return msg.name + ? cstringMessage(code.describe, `${msg.type}${msg.name || ''}`) + : msg.type === 'P' + ? 
emptyDescribePortal + : emptyDescribeStatement +} + +const close = (msg: PortalOpts): Buffer => { + const text = `${msg.type}${msg.name || ''}` + return cstringMessage(code.close, text) +} + +const copyData = (chunk: Buffer): Buffer => { + return writer.add(chunk).flush(code.copyFromChunk) +} + +const copyFail = (message: string): Buffer => { + return cstringMessage(code.copyFail, message) +} + +const codeOnlyBuffer = (code: code): Buffer => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]) + +const flushBuffer = codeOnlyBuffer(code.flush) +const syncBuffer = codeOnlyBuffer(code.sync) +const endBuffer = codeOnlyBuffer(code.end) +const copyDoneBuffer = codeOnlyBuffer(code.copyDone) + +const serialize = { + startup, + password, + requestSsl, + sendSASLInitialResponseMessage, + sendSCRAMClientFinalMessage, + query, + parse, + bind, + execute, + describe, + close, + flush: () => flushBuffer, + sync: () => syncBuffer, + end: () => endBuffer, + copyData, + copyDone: () => copyDoneBuffer, + copyFail, + cancel, +} + +export { serialize } diff --git a/reverse_engineering/node_modules/pg-protocol/src/testing/buffer-list.ts b/reverse_engineering/node_modules/pg-protocol/src/testing/buffer-list.ts new file mode 100644 index 0000000..15ac785 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/testing/buffer-list.ts @@ -0,0 +1,75 @@ +export default class BufferList { + constructor(public buffers: Buffer[] = []) {} + + public add(buffer: Buffer, front?: boolean) { + this.buffers[front ? 
'unshift' : 'push'](buffer) + return this + } + + public addInt16(val: number, front?: boolean) { + return this.add(Buffer.from([val >>> 8, val >>> 0]), front) + } + + public getByteLength(initial?: number) { + return this.buffers.reduce(function (previous, current) { + return previous + current.length + }, initial || 0) + } + + public addInt32(val: number, first?: boolean) { + return this.add( + Buffer.from([(val >>> 24) & 0xff, (val >>> 16) & 0xff, (val >>> 8) & 0xff, (val >>> 0) & 0xff]), + first + ) + } + + public addCString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len + 1) + buffer.write(val) + buffer[len] = 0 + return this.add(buffer, front) + } + + public addString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len) + buffer.write(val) + return this.add(buffer, front) + } + + public addChar(char: string, first?: boolean) { + return this.add(Buffer.from(char, 'utf8'), first) + } + + public addByte(byte: number) { + return this.add(Buffer.from([byte])) + } + + public join(appendLength?: boolean, char?: string): Buffer { + var length = this.getByteLength() + if (appendLength) { + this.addInt32(length + 4, true) + return this.join(false, char) + } + if (char) { + this.addChar(char, true) + length++ + } + var result = Buffer.alloc(length) + var index = 0 + this.buffers.forEach(function (buffer) { + buffer.copy(result, index, 0) + index += buffer.length + }) + return result + } + + public static concat(): Buffer { + var total = new BufferList() + for (var i = 0; i < arguments.length; i++) { + total.add(arguments[i]) + } + return total.join() + } +} diff --git a/reverse_engineering/node_modules/pg-protocol/src/testing/test-buffers.ts b/reverse_engineering/node_modules/pg-protocol/src/testing/test-buffers.ts new file mode 100644 index 0000000..e0a04a7 --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/testing/test-buffers.ts @@ -0,0 +1,166 @@ +// 
http://developer.postgresql.org/pgdocs/postgres/protocol-message-formats.html +import BufferList from './buffer-list' + +const buffers = { + readyForQuery: function () { + return new BufferList().add(Buffer.from('I')).join(true, 'Z') + }, + + authenticationOk: function () { + return new BufferList().addInt32(0).join(true, 'R') + }, + + authenticationCleartextPassword: function () { + return new BufferList().addInt32(3).join(true, 'R') + }, + + authenticationMD5Password: function () { + return new BufferList() + .addInt32(5) + .add(Buffer.from([1, 2, 3, 4])) + .join(true, 'R') + }, + + authenticationSASL: function () { + return new BufferList().addInt32(10).addCString('SCRAM-SHA-256').addCString('').join(true, 'R') + }, + + authenticationSASLContinue: function () { + return new BufferList().addInt32(11).addString('data').join(true, 'R') + }, + + authenticationSASLFinal: function () { + return new BufferList().addInt32(12).addString('data').join(true, 'R') + }, + + parameterStatus: function (name: string, value: string) { + return new BufferList().addCString(name).addCString(value).join(true, 'S') + }, + + backendKeyData: function (processID: number, secretKey: number) { + return new BufferList().addInt32(processID).addInt32(secretKey).join(true, 'K') + }, + + commandComplete: function (string: string) { + return new BufferList().addCString(string).join(true, 'C') + }, + + rowDescription: function (fields: any[]) { + fields = fields || [] + var buf = new BufferList() + buf.addInt16(fields.length) + fields.forEach(function (field) { + buf + .addCString(field.name) + .addInt32(field.tableID || 0) + .addInt16(field.attributeNumber || 0) + .addInt32(field.dataTypeID || 0) + .addInt16(field.dataTypeSize || 0) + .addInt32(field.typeModifier || 0) + .addInt16(field.formatCode || 0) + }) + return buf.join(true, 'T') + }, + + parameterDescription: function (dataTypeIDs: number[]) { + dataTypeIDs = dataTypeIDs || [] + var buf = new BufferList() + 
buf.addInt16(dataTypeIDs.length) + dataTypeIDs.forEach(function (dataTypeID) { + buf.addInt32(dataTypeID) + }) + return buf.join(true, 't') + }, + + dataRow: function (columns: any[]) { + columns = columns || [] + var buf = new BufferList() + buf.addInt16(columns.length) + columns.forEach(function (col) { + if (col == null) { + buf.addInt32(-1) + } else { + var strBuf = Buffer.from(col, 'utf8') + buf.addInt32(strBuf.length) + buf.add(strBuf) + } + }) + return buf.join(true, 'D') + }, + + error: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'E') + }, + + notice: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'N') + }, + + errorOrNotice: function (fields: any) { + fields = fields || [] + var buf = new BufferList() + fields.forEach(function (field: any) { + buf.addChar(field.type) + buf.addCString(field.value) + }) + return buf.add(Buffer.from([0])) // terminator + }, + + parseComplete: function () { + return new BufferList().join(true, '1') + }, + + bindComplete: function () { + return new BufferList().join(true, '2') + }, + + notification: function (id: number, channel: string, payload: string) { + return new BufferList().addInt32(id).addCString(channel).addCString(payload).join(true, 'A') + }, + + emptyQuery: function () { + return new BufferList().join(true, 'I') + }, + + portalSuspended: function () { + return new BufferList().join(true, 's') + }, + + closeComplete: function () { + return new BufferList().join(true, '3') + }, + + copyIn: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + .addInt16(cols) + for (let i = 0; i < cols; i++) { + list.addInt16(i) + } + return list.join(true, 'G') + }, + + copyOut: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + .addInt16(cols) + for (let i = 0; i < cols; i++) { + list.addInt16(i) + } + return list.join(true, 'H') + }, + + copyData: function (bytes: 
Buffer) { + return new BufferList().add(bytes).join(true, 'd') + }, + + copyDone: function () { + return new BufferList().join(true, 'c') + }, +} + +export default buffers diff --git a/reverse_engineering/node_modules/pg-protocol/src/types/chunky.d.ts b/reverse_engineering/node_modules/pg-protocol/src/types/chunky.d.ts new file mode 100644 index 0000000..7389bda --- /dev/null +++ b/reverse_engineering/node_modules/pg-protocol/src/types/chunky.d.ts @@ -0,0 +1 @@ +declare module 'chunky' diff --git a/reverse_engineering/node_modules/pg-types/.travis.yml b/reverse_engineering/node_modules/pg-types/.travis.yml new file mode 100644 index 0000000..dd6b033 --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - '4' + - 'lts/*' + - 'node' +env: + - PGUSER=postgres diff --git a/reverse_engineering/node_modules/pg-types/Makefile b/reverse_engineering/node_modules/pg-types/Makefile new file mode 100644 index 0000000..d7ec83d --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/Makefile @@ -0,0 +1,14 @@ +.PHONY: publish-patch test + +test: + npm test + +patch: test + npm version patch -m "Bump version" + git push origin master --tags + npm publish + +minor: test + npm version minor -m "Bump version" + git push origin master --tags + npm publish diff --git a/reverse_engineering/node_modules/pg-types/README.md b/reverse_engineering/node_modules/pg-types/README.md new file mode 100644 index 0000000..54a3f2c --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/README.md @@ -0,0 +1,75 @@ +# pg-types + +This is the code that turns all the raw text from postgres into JavaScript types for [node-postgres](https://github.com/brianc/node-postgres.git) + +## use + +This module is consumed and exported from the root `pg` object of node-postgres. 
To access it, do the following: + +```js +var types = require('pg').types +``` + +Generally what you'll want to do is override how a specific data-type is parsed and turned into a JavaScript type. By default the PostgreSQL backend server returns everything as strings. Every data type corresponds to a unique `OID` within the server, and these `OIDs` are sent back with the query response. So, you need to match a particluar `OID` to a function you'd like to use to take the raw text input and produce a valid JavaScript object as a result. `null` values are never parsed. + +Let's do something I commonly like to do on projects: return 64-bit integers `(int8)` as JavaScript integers. Because JavaScript doesn't have support for 64-bit integers node-postgres cannot confidently parse `int8` data type results as numbers because if you have a _huge_ number it will overflow and the result you'd get back from node-postgres would not be the result in the datbase. That would be a __very bad thing__ so node-postgres just returns `int8` results as strings and leaves the parsing up to you. Let's say that you know you don't and wont ever have numbers greater than `int4` in your database, but you're tired of recieving results from the `COUNT(*)` function as strings (because that function returns `int8`). You would do this: + +```js +var types = require('pg').types +types.setTypeParser(20, function(val) { + return parseInt(val) +}) +``` + +__boom__: now you get numbers instead of strings. + +Just as another example -- not saying this is a good idea -- let's say you want to return all dates from your database as [moment](http://momentjs.com/docs/) objects. Okay, do this: + +```js +var types = require('pg').types +var moment = require('moment') +var parseFn = function(val) { + return val === null ? 
null : moment(val) +} +types.setTypeParser(types.builtins.TIMESTAMPTZ, parseFn) +types.setTypeParser(types.builtins.TIMESTAMP, parseFn) +``` +_note: I've never done that with my dates, and I'm not 100% sure moment can parse all the date strings returned from postgres. It's just an example!_ + +If you're thinking "gee, this seems pretty handy, but how can I get a list of all the OIDs in the database and what they correspond to?!?!?!" worry not: + +```bash +$ psql -c "select typname, oid, typarray from pg_type order by oid" +``` + +If you want to find out the OID of a specific type: + +```bash +$ psql -c "select typname, oid, typarray from pg_type where typname = 'daterange' order by oid" +``` + +:smile: + +## license + +The MIT License (MIT) + +Copyright (c) 2014 Brian M. Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/reverse_engineering/node_modules/pg-types/index.d.ts b/reverse_engineering/node_modules/pg-types/index.d.ts new file mode 100644 index 0000000..4bebcbe --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/index.d.ts @@ -0,0 +1,137 @@ +export enum TypeId { + BOOL = 16, + BYTEA = 17, + CHAR = 18, + INT8 = 20, + INT2 = 21, + INT4 = 23, + REGPROC = 24, + TEXT = 25, + OID = 26, + TID = 27, + XID = 28, + CID = 29, + JSON = 114, + XML = 142, + PG_NODE_TREE = 194, + SMGR = 210, + PATH = 602, + POLYGON = 604, + CIDR = 650, + FLOAT4 = 700, + FLOAT8 = 701, + ABSTIME = 702, + RELTIME = 703, + TINTERVAL = 704, + CIRCLE = 718, + MACADDR8 = 774, + MONEY = 790, + MACADDR = 829, + INET = 869, + ACLITEM = 1033, + BPCHAR = 1042, + VARCHAR = 1043, + DATE = 1082, + TIME = 1083, + TIMESTAMP = 1114, + TIMESTAMPTZ = 1184, + INTERVAL = 1186, + TIMETZ = 1266, + BIT = 1560, + VARBIT = 1562, + NUMERIC = 1700, + REFCURSOR = 1790, + REGPROCEDURE = 2202, + REGOPER = 2203, + REGOPERATOR = 2204, + REGCLASS = 2205, + REGTYPE = 2206, + UUID = 2950, + TXID_SNAPSHOT = 2970, + PG_LSN = 3220, + PG_NDISTINCT = 3361, + PG_DEPENDENCIES = 3402, + TSVECTOR = 3614, + TSQUERY = 3615, + GTSVECTOR = 3642, + REGCONFIG = 3734, + REGDICTIONARY = 3769, + JSONB = 3802, + REGNAMESPACE = 4089, + REGROLE = 4096 +} + +export type builtinsTypes = + 'BOOL' | + 'BYTEA' | + 'CHAR' | + 'INT8' | + 'INT2' | + 'INT4' | + 'REGPROC' | + 'TEXT' | + 'OID' | + 'TID' | + 'XID' | + 'CID' | + 'JSON' | + 'XML' | + 'PG_NODE_TREE' | + 'SMGR' | + 'PATH' | + 'POLYGON' | + 'CIDR' | + 'FLOAT4' | + 'FLOAT8' | + 'ABSTIME' | + 'RELTIME' | + 'TINTERVAL' | + 'CIRCLE' | + 'MACADDR8' | + 'MONEY' | + 'MACADDR' | + 'INET' | + 'ACLITEM' | + 'BPCHAR' | + 'VARCHAR' | + 'DATE' | + 'TIME' | + 'TIMESTAMP' | + 'TIMESTAMPTZ' | + 'INTERVAL' | + 'TIMETZ' | + 'BIT' | + 'VARBIT' | + 'NUMERIC' | + 'REFCURSOR' | + 'REGPROCEDURE' | + 'REGOPER' | + 'REGOPERATOR' | + 'REGCLASS' | + 'REGTYPE' | + 'UUID' | + 'TXID_SNAPSHOT' | + 'PG_LSN' | + 
'PG_NDISTINCT' | + 'PG_DEPENDENCIES' | + 'TSVECTOR' | + 'TSQUERY' | + 'GTSVECTOR' | + 'REGCONFIG' | + 'REGDICTIONARY' | + 'JSONB' | + 'REGNAMESPACE' | + 'REGROLE'; + +export type TypesBuiltins = {[key in builtinsTypes]: TypeId}; + +export type TypeFormat = 'text' | 'binary'; + +export const builtins: TypesBuiltins; + +export function setTypeParser (id: TypeId, parseFn: ((value: string) => any)): void; +export function setTypeParser (id: TypeId, format: TypeFormat, parseFn: (value: string) => any): void; + +export const getTypeParser: (id: TypeId, format?: TypeFormat) => any + +export const arrayParser: (source: string, transform: (entry: any) => any) => any[]; diff --git a/reverse_engineering/node_modules/pg-types/index.js b/reverse_engineering/node_modules/pg-types/index.js new file mode 100644 index 0000000..952d8c2 --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/index.js @@ -0,0 +1,47 @@ +var textParsers = require('./lib/textParsers'); +var binaryParsers = require('./lib/binaryParsers'); +var arrayParser = require('./lib/arrayParser'); +var builtinTypes = require('./lib/builtins'); + +exports.getTypeParser = getTypeParser; +exports.setTypeParser = setTypeParser; +exports.arrayParser = arrayParser; +exports.builtins = builtinTypes; + +var typeParsers = { + text: {}, + binary: {} +}; + +//the empty parse function +function noParse (val) { + return String(val); +}; + +//returns a function used to convert a specific type (specified by +//oid) into a result javascript type +//note: the oid can be obtained via the following sql query: +//SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE'; +function getTypeParser (oid, format) { + format = format || 'text'; + if (!typeParsers[format]) { + return noParse; + } + return typeParsers[format][oid] || noParse; +}; + +function setTypeParser (oid, format, parseFn) { + if(typeof format == 'function') { + parseFn = format; + format = 'text'; + } + typeParsers[format][oid] = parseFn; +}; + 
+textParsers.init(function(oid, converter) { + typeParsers.text[oid] = converter; +}); + +binaryParsers.init(function(oid, converter) { + typeParsers.binary[oid] = converter; +}); diff --git a/reverse_engineering/node_modules/pg-types/index.test-d.ts b/reverse_engineering/node_modules/pg-types/index.test-d.ts new file mode 100644 index 0000000..d530e6e --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/index.test-d.ts @@ -0,0 +1,21 @@ +import * as types from '.'; +import { expectType } from 'tsd'; + +// builtins +expectType(types.builtins); + +// getTypeParser +const noParse = types.getTypeParser(types.builtins.NUMERIC, 'text'); +const numericParser = types.getTypeParser(types.builtins.NUMERIC, 'binary'); +expectType(noParse('noParse')); +expectType(numericParser([200, 1, 0, 15])); + +// getArrayParser +const value = types.arrayParser('{1,2,3}', (num) => parseInt(num)); +expectType(value); + +//setTypeParser +types.setTypeParser(types.builtins.INT8, parseInt); +types.setTypeParser(types.builtins.FLOAT8, parseFloat); +types.setTypeParser(types.builtins.FLOAT8, 'binary', (data) => data[0]); +types.setTypeParser(types.builtins.FLOAT8, 'text', parseFloat); diff --git a/reverse_engineering/node_modules/pg-types/lib/arrayParser.js b/reverse_engineering/node_modules/pg-types/lib/arrayParser.js new file mode 100644 index 0000000..81ccffb --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/lib/arrayParser.js @@ -0,0 +1,11 @@ +var array = require('postgres-array'); + +module.exports = { + create: function (source, transform) { + return { + parse: function() { + return array.parse(source, transform); + } + }; + } +}; diff --git a/reverse_engineering/node_modules/pg-types/lib/binaryParsers.js b/reverse_engineering/node_modules/pg-types/lib/binaryParsers.js new file mode 100644 index 0000000..e12c2f4 --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/lib/binaryParsers.js @@ -0,0 +1,257 @@ +var parseInt64 = require('pg-int8'); + +var parseBits = 
function(data, bits, offset, invert, callback) { + offset = offset || 0; + invert = invert || false; + callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; }; + var offsetBytes = offset >> 3; + + var inv = function(value) { + if (invert) { + return ~value & 0xff; + } + + return value; + }; + + // read first (maybe partial) byte + var mask = 0xff; + var firstBits = 8 - (offset % 8); + if (bits < firstBits) { + mask = (0xff << (8 - bits)) & 0xff; + firstBits = bits; + } + + if (offset) { + mask = mask >> (offset % 8); + } + + var result = 0; + if ((offset % 8) + bits >= 8) { + result = callback(0, inv(data[offsetBytes]) & mask, firstBits); + } + + // read bytes + var bytes = (bits + offset) >> 3; + for (var i = offsetBytes + 1; i < bytes; i++) { + result = callback(result, inv(data[i]), 8); + } + + // bits to read, that are not a complete byte + var lastBits = (bits + offset) % 8; + if (lastBits > 0) { + result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits); + } + + return result; +}; + +var parseFloatFromBits = function(data, precisionBits, exponentBits) { + var bias = Math.pow(2, exponentBits - 1) - 1; + var sign = parseBits(data, 1); + var exponent = parseBits(data, exponentBits, 1); + + if (exponent === 0) { + return 0; + } + + // parse mantissa + var precisionBitsCounter = 1; + var parsePrecisionBits = function(lastValue, newValue, bits) { + if (lastValue === 0) { + lastValue = 1; + } + + for (var i = 1; i <= bits; i++) { + precisionBitsCounter /= 2; + if ((newValue & (0x1 << (bits - i))) > 0) { + lastValue += precisionBitsCounter; + } + } + + return lastValue; + }; + + var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits); + + // special cases + if (exponent == (Math.pow(2, exponentBits + 1) - 1)) { + if (mantissa === 0) { + return (sign === 0) ? Infinity : -Infinity; + } + + return NaN; + } + + // normale number + return ((sign === 0) ? 
1 : -1) * Math.pow(2, exponent - bias) * mantissa; +}; + +var parseInt16 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 15, 1, true) + 1); + } + + return parseBits(value, 15, 1); +}; + +var parseInt32 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 31, 1, true) + 1); + } + + return parseBits(value, 31, 1); +}; + +var parseFloat32 = function(value) { + return parseFloatFromBits(value, 23, 8); +}; + +var parseFloat64 = function(value) { + return parseFloatFromBits(value, 52, 11); +}; + +var parseNumeric = function(value) { + var sign = parseBits(value, 16, 32); + if (sign == 0xc000) { + return NaN; + } + + var weight = Math.pow(10000, parseBits(value, 16, 16)); + var result = 0; + + var digits = []; + var ndigits = parseBits(value, 16); + for (var i = 0; i < ndigits; i++) { + result += parseBits(value, 16, 64 + (16 * i)) * weight; + weight /= 10000; + } + + var scale = Math.pow(10, parseBits(value, 16, 48)); + return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale; +}; + +var parseDate = function(isUTC, value) { + var sign = parseBits(value, 1); + var rawValue = parseBits(value, 63, 1); + + // discard usecs and shift from 2000 to 1970 + var result = new Date((((sign === 0) ? 
1 : -1) * rawValue / 1000) + 946684800000); + + if (!isUTC) { + result.setTime(result.getTime() + result.getTimezoneOffset() * 60000); + } + + // add microseconds to the date + result.usec = rawValue % 1000; + result.getMicroSeconds = function() { + return this.usec; + }; + result.setMicroSeconds = function(value) { + this.usec = value; + }; + result.getUTCMicroSeconds = function() { + return this.usec; + }; + + return result; +}; + +var parseArray = function(value) { + var dim = parseBits(value, 32); + + var flags = parseBits(value, 32, 32); + var elementType = parseBits(value, 32, 64); + + var offset = 96; + var dims = []; + for (var i = 0; i < dim; i++) { + // parse dimension + dims[i] = parseBits(value, 32, offset); + offset += 32; + + // ignore lower bounds + offset += 32; + } + + var parseElement = function(elementType) { + // parse content length + var length = parseBits(value, 32, offset); + offset += 32; + + // parse null values + if (length == 0xffffffff) { + return null; + } + + var result; + if ((elementType == 0x17) || (elementType == 0x14)) { + // int/bigint + result = parseBits(value, length * 8, offset); + offset += length * 8; + return result; + } + else if (elementType == 0x19) { + // string + result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3); + return result; + } + else { + console.log("ERROR: ElementType not implemented: " + elementType); + } + }; + + var parse = function(dimension, elementType) { + var array = []; + var i; + + if (dimension.length > 1) { + var count = dimension.shift(); + for (i = 0; i < count; i++) { + array[i] = parse(dimension, elementType); + } + dimension.unshift(count); + } + else { + for (i = 0; i < dimension[0]; i++) { + array[i] = parseElement(elementType); + } + } + + return array; + }; + + return parse(dims, elementType); +}; + +var parseText = function(value) { + return value.toString('utf8'); +}; + +var parseBool = function(value) { + if(value === null) return null; + return 
(parseBits(value, 8) > 0); +}; + +var init = function(register) { + register(20, parseInt64); + register(21, parseInt16); + register(23, parseInt32); + register(26, parseInt32); + register(1700, parseNumeric); + register(700, parseFloat32); + register(701, parseFloat64); + register(16, parseBool); + register(1114, parseDate.bind(null, false)); + register(1184, parseDate.bind(null, true)); + register(1000, parseArray); + register(1007, parseArray); + register(1016, parseArray); + register(1008, parseArray); + register(1009, parseArray); + register(25, parseText); +}; + +module.exports = { + init: init +}; diff --git a/reverse_engineering/node_modules/pg-types/lib/builtins.js b/reverse_engineering/node_modules/pg-types/lib/builtins.js new file mode 100644 index 0000000..f0c134a --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/lib/builtins.js @@ -0,0 +1,73 @@ +/** + * Following query was used to generate this file: + + SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid) + FROM pg_type PT + WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only builting Postgres types with stable OID (extension types are not guaranted to be stable) + AND typtype = 'b' -- Only basic types + AND typelem = 0 -- Ignore aliases + AND typisdefined -- Ignore undefined types + */ + +module.exports = { + BOOL: 16, + BYTEA: 17, + CHAR: 18, + INT8: 20, + INT2: 21, + INT4: 23, + REGPROC: 24, + TEXT: 25, + OID: 26, + TID: 27, + XID: 28, + CID: 29, + JSON: 114, + XML: 142, + PG_NODE_TREE: 194, + SMGR: 210, + PATH: 602, + POLYGON: 604, + CIDR: 650, + FLOAT4: 700, + FLOAT8: 701, + ABSTIME: 702, + RELTIME: 703, + TINTERVAL: 704, + CIRCLE: 718, + MACADDR8: 774, + MONEY: 790, + MACADDR: 829, + INET: 869, + ACLITEM: 1033, + BPCHAR: 1042, + VARCHAR: 1043, + DATE: 1082, + TIME: 1083, + TIMESTAMP: 1114, + TIMESTAMPTZ: 1184, + INTERVAL: 1186, + TIMETZ: 1266, + BIT: 1560, + VARBIT: 1562, + NUMERIC: 1700, + REFCURSOR: 1790, + 
REGPROCEDURE: 2202, + REGOPER: 2203, + REGOPERATOR: 2204, + REGCLASS: 2205, + REGTYPE: 2206, + UUID: 2950, + TXID_SNAPSHOT: 2970, + PG_LSN: 3220, + PG_NDISTINCT: 3361, + PG_DEPENDENCIES: 3402, + TSVECTOR: 3614, + TSQUERY: 3615, + GTSVECTOR: 3642, + REGCONFIG: 3734, + REGDICTIONARY: 3769, + JSONB: 3802, + REGNAMESPACE: 4089, + REGROLE: 4096 +}; diff --git a/reverse_engineering/node_modules/pg-types/lib/textParsers.js b/reverse_engineering/node_modules/pg-types/lib/textParsers.js new file mode 100644 index 0000000..b1218bf --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/lib/textParsers.js @@ -0,0 +1,215 @@ +var array = require('postgres-array') +var arrayParser = require('./arrayParser'); +var parseDate = require('postgres-date'); +var parseInterval = require('postgres-interval'); +var parseByteA = require('postgres-bytea'); + +function allowNull (fn) { + return function nullAllowed (value) { + if (value === null) return value + return fn(value) + } +} + +function parseBool (value) { + if (value === null) return value + return value === 'TRUE' || + value === 't' || + value === 'true' || + value === 'y' || + value === 'yes' || + value === 'on' || + value === '1'; +} + +function parseBoolArray (value) { + if (!value) return null + return array.parse(value, parseBool) +} + +function parseBaseTenInt (string) { + return parseInt(string, 10) +} + +function parseIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(parseBaseTenInt)) +} + +function parseBigIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(function (entry) { + return parseBigInteger(entry).trim() + })) +} + +var parsePointArray = function(value) { + if(!value) { return null; } + var p = arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parsePoint(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseFloatArray = function(value) { + if(!value) { return null; } + var p = 
arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parseFloat(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseStringArray = function(value) { + if(!value) { return null; } + + var p = arrayParser.create(value); + return p.parse(); +}; + +var parseDateArray = function(value) { + if (!value) { return null; } + + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseDate(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseIntervalArray = function(value) { + if (!value) { return null; } + + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseInterval(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseByteAArray = function(value) { + if (!value) { return null; } + + return array.parse(value, allowNull(parseByteA)); +}; + +var parseInteger = function(value) { + return parseInt(value, 10); +}; + +var parseBigInteger = function(value) { + var valStr = String(value); + if (/^\d+$/.test(valStr)) { return valStr; } + return value; +}; + +var parseJsonArray = function(value) { + if (!value) { return null; } + + return array.parse(value, allowNull(JSON.parse)); +}; + +var parsePoint = function(value) { + if (value[0] !== '(') { return null; } + + value = value.substring( 1, value.length - 1 ).split(','); + + return { + x: parseFloat(value[0]) + , y: parseFloat(value[1]) + }; +}; + +var parseCircle = function(value) { + if (value[0] !== '<' && value[1] !== '(') { return null; } + + var point = '('; + var radius = ''; + var pointParsed = false; + for (var i = 2; i < value.length - 1; i++){ + if (!pointParsed) { + point += value[i]; + } + + if (value[i] === ')') { + pointParsed = true; + continue; + } else if (!pointParsed) { + continue; + } + + if (value[i] === ','){ + continue; + } + + radius += value[i]; + } + var result = parsePoint(point); + result.radius = parseFloat(radius); + + return result; +}; + +var init 
= function(register) { + register(20, parseBigInteger); // int8 + register(21, parseInteger); // int2 + register(23, parseInteger); // int4 + register(26, parseInteger); // oid + register(700, parseFloat); // float4/real + register(701, parseFloat); // float8/double + register(16, parseBool); + register(1082, parseDate); // date + register(1114, parseDate); // timestamp without timezone + register(1184, parseDate); // timestamp + register(600, parsePoint); // point + register(651, parseStringArray); // cidr[] + register(718, parseCircle); // circle + register(1000, parseBoolArray); + register(1001, parseByteAArray); + register(1005, parseIntegerArray); // _int2 + register(1007, parseIntegerArray); // _int4 + register(1028, parseIntegerArray); // oid[] + register(1016, parseBigIntegerArray); // _int8 + register(1017, parsePointArray); // point[] + register(1021, parseFloatArray); // _float4 + register(1022, parseFloatArray); // _float8 + register(1231, parseFloatArray); // _numeric + register(1014, parseStringArray); //char + register(1015, parseStringArray); //varchar + register(1008, parseStringArray); + register(1009, parseStringArray); + register(1040, parseStringArray); // macaddr[] + register(1041, parseStringArray); // inet[] + register(1115, parseDateArray); // timestamp without time zone[] + register(1182, parseDateArray); // _date + register(1185, parseDateArray); // timestamp with time zone[] + register(1186, parseInterval); + register(1187, parseIntervalArray); + register(17, parseByteA); + register(114, JSON.parse.bind(JSON)); // json + register(3802, JSON.parse.bind(JSON)); // jsonb + register(199, parseJsonArray); // json[] + register(3807, parseJsonArray); // jsonb[] + register(3907, parseStringArray); // numrange[] + register(2951, parseStringArray); // uuid[] + register(791, parseStringArray); // money[] + register(1183, parseStringArray); // time[] + register(1270, parseStringArray); // timetz[] +}; + +module.exports = { + init: init +}; diff 
--git a/reverse_engineering/node_modules/pg-types/package.json b/reverse_engineering/node_modules/pg-types/package.json new file mode 100644 index 0000000..4a904aa --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/package.json @@ -0,0 +1,69 @@ +{ + "_from": "pg-types@^2.1.0", + "_id": "pg-types@2.2.0", + "_inBundle": false, + "_integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "_location": "/pg-types", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-types@^2.1.0", + "name": "pg-types", + "escapedName": "pg-types", + "rawSpec": "^2.1.0", + "saveSpec": null, + "fetchSpec": "^2.1.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "_shasum": "2d0250d636454f7cfa3b6ae0382fdfa8063254a3", + "_spec": "pg-types@^2.1.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", + "author": { + "name": "Brian M. 
Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-pg-types/issues" + }, + "bundleDependencies": false, + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "deprecated": false, + "description": "Query result type converters for node-postgres", + "devDependencies": { + "if-node-version": "^1.1.1", + "pff": "^1.0.0", + "tap-spec": "^4.0.0", + "tape": "^4.0.0", + "tsd": "^0.7.4" + }, + "engines": { + "node": ">=4" + }, + "homepage": "https://github.com/brianc/node-pg-types", + "keywords": [ + "postgres", + "PostgreSQL", + "pg" + ], + "license": "MIT", + "main": "index.js", + "name": "pg-types", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-pg-types.git" + }, + "scripts": { + "test": "tape test/*.js | tap-spec && npm run test-ts", + "test-ts": "if-node-version '>= 8' tsd" + }, + "version": "2.2.0" +} diff --git a/reverse_engineering/node_modules/pg-types/test/index.js b/reverse_engineering/node_modules/pg-types/test/index.js new file mode 100644 index 0000000..b7d05cd --- /dev/null +++ b/reverse_engineering/node_modules/pg-types/test/index.js @@ -0,0 +1,24 @@ + +var test = require('tape') +var printf = require('pff') +var getTypeParser = require('../').getTypeParser +var types = require('./types') + +test('types', function (t) { + Object.keys(types).forEach(function (typeName) { + var type = types[typeName] + t.test(typeName, function (t) { + var parser = getTypeParser(type.id, type.format) + type.tests.forEach(function (tests) { + var input = tests[0] + var expected = tests[1] + var result = parser(input) + if (typeof expected === 'function') { + return expected(t, result) + } + t.equal(result, expected) + }) + t.end() + }) + }) +}) diff --git a/reverse_engineering/node_modules/pg-types/test/types.js b/reverse_engineering/node_modules/pg-types/test/types.js new file mode 100644 index 0000000..af708a5 --- 
/dev/null +++ b/reverse_engineering/node_modules/pg-types/test/types.js @@ -0,0 +1,597 @@ +'use strict' + +exports['string/varchar'] = { + format: 'text', + id: 1043, + tests: [ + ['bang', 'bang'] + ] +} + +exports['integer/int4'] = { + format: 'text', + id: 23, + tests: [ + ['2147483647', 2147483647] + ] +} + +exports['smallint/int2'] = { + format: 'text', + id: 21, + tests: [ + ['32767', 32767] + ] +} + +exports['bigint/int8'] = { + format: 'text', + id: 20, + tests: [ + ['9223372036854775807', '9223372036854775807'] + ] +} + +exports.oid = { + format: 'text', + id: 26, + tests: [ + ['103', 103] + ] +} + +var bignum = '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628' +exports.numeric = { + format: 'text', + id: 1700, + tests: [ + [bignum, bignum] + ] +} + +exports['real/float4'] = { + format: 'text', + id: 700, + tests: [ + ['123.456', 123.456] + ] +} + +exports['double precision / float 8'] = { + format: 'text', + id: 701, + tests: [ + ['12345678.12345678', 12345678.12345678] + ] +} + +exports.boolean = { + format: 'text', + id: 16, + tests: [ + ['TRUE', true], + ['t', true], + ['true', true], + ['y', true], + ['yes', true], + ['on', true], + ['1', true], + ['f', false], + [null, null] + ] +} + +exports.timestamptz = { + format: 'text', + id: 1184, + tests: [ + [ + '2010-10-31 14:54:13.74-05:30', + dateEquals(2010, 9, 31, 20, 24, 13, 740) + ], + [ + '2011-01-23 22:05:00.68-06', + dateEquals(2011, 0, 24, 4, 5, 0, 680) + ], + [ + '2010-10-30 14:11:12.730838Z', + dateEquals(2010, 9, 30, 14, 11, 12, 730) + ], + [ + '2010-10-30 13:10:01+05', + dateEquals(2010, 9, 30, 8, 10, 1, 0) + ] + ] +} + +exports.timestamp = { + format: 'text', + id: 1114, + tests: [ + [ + '2010-10-31 00:00:00', + function (t, value) { + t.equal( + value.toUTCString(), + new Date(2010, 9, 31, 0, 0, 0, 0, 0).toUTCString() + ) + t.equal( + value.toString(), + new Date(2010, 9, 31, 0, 0, 0, 0, 0, 0).toString() + ) + } + ] + ] +} + 
+exports.date = { + format: 'text', + id: 1082, + tests: [ + ['2010-10-31', function (t, value) { + var now = new Date(2010, 9, 31) + dateEquals( + 2010, + now.getUTCMonth(), + now.getUTCDate(), + now.getUTCHours(), 0, 0, 0)(t, value) + t.equal(value.getHours(), now.getHours()) + }] + ] +} + +exports.inet = { + format: 'text', + id: 869, + tests: [ + ['8.8.8.8', '8.8.8.8'], + ['2001:4860:4860::8888', '2001:4860:4860::8888'], + ['127.0.0.1', '127.0.0.1'], + ['fd00:1::40e', 'fd00:1::40e'], + ['1.2.3.4', '1.2.3.4'] + ] +} + +exports.cidr = { + format: 'text', + id: 650, + tests: [ + ['172.16.0.0/12', '172.16.0.0/12'], + ['fe80::/10', 'fe80::/10'], + ['fc00::/7', 'fc00::/7'], + ['192.168.0.0/24', '192.168.0.0/24'], + ['10.0.0.0/8', '10.0.0.0/8'] + ] +} + +exports.macaddr = { + format: 'text', + id: 829, + tests: [ + ['08:00:2b:01:02:03', '08:00:2b:01:02:03'], + ['16:10:9f:0d:66:00', '16:10:9f:0d:66:00'] + ] +} + +exports.numrange = { + format: 'text', + id: 3906, + tests: [ + ['[,]', '[,]'], + ['(,)', '(,)'], + ['(,]', '(,]'], + ['[1,)', '[1,)'], + ['[,1]', '[,1]'], + ['(1,2)', '(1,2)'], + ['(1,20.5]', '(1,20.5]'] + ] +} + +exports.interval = { + format: 'text', + id: 1186, + tests: [ + ['01:02:03', function (t, value) { + t.equal(value.toPostgres(), '3 seconds 2 minutes 1 hours') + t.deepEqual(value, {hours: 1, minutes: 2, seconds: 3}) + }], + ['01:02:03.456', function (t, value) { + t.deepEqual(value, {hours: 1, minutes:2, seconds: 3, milliseconds: 456}) + }], + ['1 year -32 days', function (t, value) { + t.equal(value.toPostgres(), '-32 days 1 years') + t.deepEqual(value, {years: 1, days: -32}) + }], + ['1 day -00:00:03', function (t, value) { + t.equal(value.toPostgres(), '-3 seconds 1 days') + t.deepEqual(value, {days: 1, seconds: -3}) + }] + ] +} + +exports.bytea = { + format: 'text', + id: 17, + tests: [ + ['foo\\000\\200\\\\\\377', function (t, value) { + var buffer = new Buffer([102, 111, 111, 0, 128, 92, 255]) + t.ok(buffer.equals(value)) + }], + ['', 
function (t, value) { + var buffer = new Buffer(0) + t.ok(buffer.equals(value)) + }] + ] +} + +exports['array/boolean'] = { + format: 'text', + id: 1000, + tests: [ + ['{true,false}', function (t, value) { + t.deepEqual(value, [true, false]) + }] + ] +} + +exports['array/char'] = { + format: 'text', + id: 1014, + tests: [ + ['{foo,bar}', function (t, value) { + t.deepEqual(value, ['foo', 'bar']) + }] + ] +} + +exports['array/varchar'] = { + format: 'text', + id: 1015, + tests: [ + ['{foo,bar}', function (t, value) { + t.deepEqual(value, ['foo', 'bar']) + }] + ] +} + +exports['array/text'] = { + format: 'text', + id: 1008, + tests: [ + ['{foo}', function (t, value) { + t.deepEqual(value, ['foo']) + }] + ] +} + +exports['array/bytea'] = { + format: 'text', + id: 1001, + tests: [ + ['{"\\\\x00000000"}', function (t, value) { + var buffer = new Buffer('00000000', 'hex') + t.ok(Array.isArray(value)) + t.equal(value.length, 1) + t.ok(buffer.equals(value[0])) + }], + ['{NULL,"\\\\x4e554c4c"}', function (t, value) { + var buffer = new Buffer('4e554c4c', 'hex') + t.ok(Array.isArray(value)) + t.equal(value.length, 2) + t.equal(value[0], null) + t.ok(buffer.equals(value[1])) + }], + ] +} + +exports['array/numeric'] = { + format: 'text', + id: 1231, + tests: [ + ['{1.2,3.4}', function (t, value) { + t.deepEqual(value, [1.2, 3.4]) + }] + ] +} + +exports['array/int2'] = { + format: 'text', + id: 1005, + tests: [ + ['{-32768, -32767, 32766, 32767}', function (t, value) { + t.deepEqual(value, [-32768, -32767, 32766, 32767]) + }] + ] +} + +exports['array/int4'] = { + format: 'text', + id: 1005, + tests: [ + ['{-2147483648, -2147483647, 2147483646, 2147483647}', function (t, value) { + t.deepEqual(value, [-2147483648, -2147483647, 2147483646, 2147483647]) + }] + ] +} + +exports['array/int8'] = { + format: 'text', + id: 1016, + tests: [ + [ + '{-9223372036854775808, -9223372036854775807, 9223372036854775806, 9223372036854775807}', + function (t, value) { + t.deepEqual(value, [ + 
'-9223372036854775808', + '-9223372036854775807', + '9223372036854775806', + '9223372036854775807' + ]) + } + ] + ] +} + +exports['array/json'] = { + format: 'text', + id: 199, + tests: [ + [ + '{{1,2},{[3],"[4,5]"},{null,NULL}}', + function (t, value) { + t.deepEqual(value, [ + [1, 2], + [[3], [4, 5]], + [null, null], + ]) + } + ] + ] +} + +exports['array/jsonb'] = { + format: 'text', + id: 3807, + tests: exports['array/json'].tests +} + +exports['array/point'] = { + format: 'text', + id: 1017, + tests: [ + ['{"(25.1,50.5)","(10.1,40)"}', function (t, value) { + t.deepEqual(value, [{x: 25.1, y: 50.5}, {x: 10.1, y: 40}]) + }] + ] +} + +exports['array/oid'] = { + format: 'text', + id: 1028, + tests: [ + ['{25864,25860}', function (t, value) { + t.deepEqual(value, [25864, 25860]) + }] + ] +} + +exports['array/float4'] = { + format: 'text', + id: 1021, + tests: [ + ['{1.2, 3.4}', function (t, value) { + t.deepEqual(value, [1.2, 3.4]) + }] + ] +} + +exports['array/float8'] = { + format: 'text', + id: 1022, + tests: [ + ['{-12345678.1234567, 12345678.12345678}', function (t, value) { + t.deepEqual(value, [-12345678.1234567, 12345678.12345678]) + }] + ] +} + +exports['array/date'] = { + format: 'text', + id: 1182, + tests: [ + ['{2014-01-01,2015-12-31}', function (t, value) { + var expecteds = [new Date(2014, 0, 1), new Date(2015, 11, 31)] + t.equal(value.length, 2) + value.forEach(function (date, index) { + var expected = expecteds[index] + dateEquals( + expected.getUTCFullYear(), + expected.getUTCMonth(), + expected.getUTCDate(), + expected.getUTCHours(), 0, 0, 0)(t, date) + }) + }] + ] +} + +exports['array/interval'] = { + format: 'text', + id: 1187, + tests: [ + ['{01:02:03,1 day -00:00:03}', function (t, value) { + var expecteds = [{hours: 1, minutes: 2, seconds: 3}, + {days: 1, seconds: -3}] + t.equal(value.length, 2) + t.deepEqual(value, expecteds); + }] + ] +} + +exports['array/inet'] = { + format: 'text', + id: 1041, + tests: [ + ['{8.8.8.8}', function (t, 
value) { + t.deepEqual(value, ['8.8.8.8']); + }], + ['{2001:4860:4860::8888}', function (t, value) { + t.deepEqual(value, ['2001:4860:4860::8888']); + }], + ['{127.0.0.1,fd00:1::40e,1.2.3.4}', function (t, value) { + t.deepEqual(value, ['127.0.0.1', 'fd00:1::40e', '1.2.3.4']); + }] + ] +} + +exports['array/cidr'] = { + format: 'text', + id: 651, + tests: [ + ['{172.16.0.0/12}', function (t, value) { + t.deepEqual(value, ['172.16.0.0/12']); + }], + ['{fe80::/10}', function (t, value) { + t.deepEqual(value, ['fe80::/10']); + }], + ['{10.0.0.0/8,fc00::/7,192.168.0.0/24}', function (t, value) { + t.deepEqual(value, ['10.0.0.0/8', 'fc00::/7', '192.168.0.0/24']); + }] + ] +} + +exports['array/macaddr'] = { + format: 'text', + id: 1040, + tests: [ + ['{08:00:2b:01:02:03,16:10:9f:0d:66:00}', function (t, value) { + t.deepEqual(value, ['08:00:2b:01:02:03', '16:10:9f:0d:66:00']); + }] + ] +} + +exports['array/numrange'] = { + format: 'text', + id: 3907, + tests: [ + ['{"[1,2]","(4.5,8)","[10,40)","(-21.2,60.3]"}', function (t, value) { + t.deepEqual(value, ['[1,2]', '(4.5,8)', '[10,40)', '(-21.2,60.3]']); + }], + ['{"[,20]","[3,]","[,]","(,35)","(1,)","(,)"}', function (t, value) { + t.deepEqual(value, ['[,20]', '[3,]', '[,]', '(,35)', '(1,)', '(,)']); + }], + ['{"[,20)","[3,)","[,)","[,35)","[1,)","[,)"}', function (t, value) { + t.deepEqual(value, ['[,20)', '[3,)', '[,)', '[,35)', '[1,)', '[,)']); + }] + ] +} + +exports['binary-string/varchar'] = { + format: 'binary', + id: 1043, + tests: [ + ['bang', 'bang'] + ] +} + +exports['binary-integer/int4'] = { + format: 'binary', + id: 23, + tests: [ + [[0, 0, 0, 100], 100] + ] +} + +exports['binary-smallint/int2'] = { + format: 'binary', + id: 21, + tests: [ + [[0, 101], 101] + ] +} + +exports['binary-bigint/int8'] = { + format: 'binary', + id: 20, + tests: [ + [new Buffer([0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), '9223372036854775807'] + ] +} + +exports['binary-oid'] = { + format: 'binary', + id: 26, + tests: [ + [[0, 
0, 0, 103], 103] + ] +} + +exports['binary-numeric'] = { + format: 'binary', + id: 1700, + tests: [ + [ + [0, 2, 0, 0, 0, 0, 0, hex('0x64'), 0, 12, hex('0xd'), hex('0x48'), 0, 0, 0, 0], + 12.34 + ] + ] +} + +exports['binary-real/float4'] = { + format: 'binary', + id: 700, + tests: [ + [['0x41', '0x48', '0x00', '0x00'].map(hex), 12.5] + ] +} + +exports['binary-boolean'] = { + format: 'binary', + id: 16, + tests: [ + [[1], true], + [[0], false], + [null, null] + ] +} + +exports['binary-string'] = { + format: 'binary', + id: 25, + tests: [ + [ + new Buffer(['0x73', '0x6c', '0x61', '0x64', '0x64', '0x61'].map(hex)), + 'sladda' + ] + ] +} + +exports.point = { + format: 'text', + id: 600, + tests: [ + ['(25.1,50.5)', function (t, value) { + t.deepEqual(value, {x: 25.1, y: 50.5}) + }] + ] +} + +exports.circle = { + format: 'text', + id: 718, + tests: [ + ['<(25,10),5>', function (t, value) { + t.deepEqual(value, {x: 25, y: 10, radius: 5}) + }] + ] +} + +function hex (string) { + return parseInt(string, 16) +} + +function dateEquals () { + var timestamp = Date.UTC.apply(Date, arguments) + return function (t, value) { + t.equal(value.toUTCString(), new Date(timestamp).toUTCString()) + } +} diff --git a/reverse_engineering/node_modules/pg/LICENSE b/reverse_engineering/node_modules/pg/LICENSE new file mode 100644 index 0000000..5c14056 --- /dev/null +++ b/reverse_engineering/node_modules/pg/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - 2021 Brian Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included 
in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/reverse_engineering/node_modules/pg/README.md b/reverse_engineering/node_modules/pg/README.md new file mode 100644 index 0000000..e5fcf02 --- /dev/null +++ b/reverse_engineering/node_modules/pg/README.md @@ -0,0 +1,101 @@ +# node-postgres + +[![Build Status](https://secure.travis-ci.org/brianc/node-postgres.svg?branch=master)](http://travis-ci.org/brianc/node-postgres) +[![Dependency Status](https://david-dm.org/brianc/node-postgres.svg?path=packages/pg)](https://david-dm.org/brianc/node-postgres?path=packages/pg) +NPM version +NPM downloads + +Non-blocking PostgreSQL client for Node.js. Pure JavaScript and optional native libpq bindings. + +## Install + +```sh +$ npm install pg +``` + +--- + +## :star: [Documentation](https://node-postgres.com) :star: + +### Features + +- Pure JavaScript client and native libpq bindings share _the same API_ +- Connection pooling +- Extensible JS ↔ PostgreSQL data-type coercion +- Supported PostgreSQL features + - Parameterized queries + - Named statements with query plan caching + - Async notifications with `LISTEN/NOTIFY` + - Bulk import & export with `COPY TO/COPY FROM` + +### Extras + +node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture. +The entire list can be found on our [wiki](https://github.com/brianc/node-postgres/wiki/Extras). + +## Support + +node-postgres is free software. 
If you encounter a bug with the library please open an issue on the [GitHub repo](https://github.com/brianc/node-postgres). If you have questions unanswered by the documentation please open an issue pointing out how the documentation was unclear & I will do my best to make it better! + +When you open an issue please provide: + +- version of Node +- version of Postgres +- smallest possible snippet of code to reproduce the problem + +You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that's your thing. I try to always announce noteworthy changes & developments with node-postgres on Twitter. + +## Sponsorship :two_hearts: + +node-postgres's continued development has been made possible in part by generous finanical support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md) and these featured sponsors: + +
+ + + + + + + + +
+ +If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development. + +## Contributing + +**:heart: contributions!** + +I will **happily** accept your pull request if it: + +- **has tests** +- looks reasonable +- does not break backwards compatibility + +If your change involves breaking backwards compatibility please please point that out in the pull request & we can discuss & plan when and how to release it and what type of documentation or communicate it will require. + +## Troubleshooting and FAQ + +The causes and solutions to common errors can be found among the [Frequently Asked Questions (FAQ)](https://github.com/brianc/node-postgres/wiki/FAQ) + +## License + +Copyright (c) 2010-2020 Brian Carlson (brian.m.carlson@gmail.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/reverse_engineering/node_modules/pg/lib/client.js b/reverse_engineering/node_modules/pg/lib/client.js new file mode 100644 index 0000000..589aa9f --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/client.js @@ -0,0 +1,621 @@ +'use strict' + +var EventEmitter = require('events').EventEmitter +var util = require('util') +var utils = require('./utils') +var sasl = require('./sasl') +var pgPass = require('pgpass') +var TypeOverrides = require('./type-overrides') + +var ConnectionParameters = require('./connection-parameters') +var Query = require('./query') +var defaults = require('./defaults') +var Connection = require('./connection') + +class Client extends EventEmitter { + constructor(config) { + super() + + this.connectionParameters = new ConnectionParameters(config) + this.user = this.connectionParameters.user + this.database = this.connectionParameters.database + this.port = this.connectionParameters.port + this.host = this.connectionParameters.host + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: this.connectionParameters.password, + }) + + this.replication = this.connectionParameters.replication + + var c = config || {} + + this._Promise = c.Promise || global.Promise + this._types = new TypeOverrides(c.types) + this._ending = false + this._connecting = false + this._connected = false + this._connectionError = false + this._queryable = true + + this.connection = + c.connection || + new Connection({ + stream: c.stream, + ssl: this.connectionParameters.ssl, + keepAlive: c.keepAlive || false, + keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0, + encoding: this.connectionParameters.client_encoding || 'utf8', + }) + this.queryQueue = [] + this.binary = c.binary || defaults.binary + this.processID = null + this.secretKey = null + this.ssl = 
this.connectionParameters.ssl || false + // As with Password, make SSL->Key (the private key) non-enumerable. + // It won't show up in stack traces + // or if the client is console.logged + if (this.ssl && this.ssl.key) { + Object.defineProperty(this.ssl, 'key', { + enumerable: false, + }) + } + + this._connectionTimeoutMillis = c.connectionTimeoutMillis || 0 + } + + _errorAllQueries(err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.handleError(err, this.connection) + }) + } + + if (this.activeQuery) { + enqueueError(this.activeQuery) + this.activeQuery = null + } + + this.queryQueue.forEach(enqueueError) + this.queryQueue.length = 0 + } + + _connect(callback) { + var self = this + var con = this.connection + this._connectionCallback = callback + + if (this._connecting || this._connected) { + const err = new Error('Client has already been connected. You cannot reuse a client.') + process.nextTick(() => { + callback(err) + }) + return + } + this._connecting = true + + this.connectionTimeoutHandle + if (this._connectionTimeoutMillis > 0) { + this.connectionTimeoutHandle = setTimeout(() => { + con._ending = true + con.stream.destroy(new Error('timeout expired')) + }, this._connectionTimeoutMillis) + } + + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' + this.port) + } else { + con.connect(this.port, this.host) + } + + // once connection is established send startup message + con.on('connect', function () { + if (self.ssl) { + con.requestSsl() + } else { + con.startup(self.getStartupConf()) + } + }) + + con.on('sslconnect', function () { + con.startup(self.getStartupConf()) + }) + + this._attachListeners(con) + + con.once('end', () => { + const error = this._ending ? 
new Error('Connection terminated') : new Error('Connection terminated unexpectedly') + + clearTimeout(this.connectionTimeoutHandle) + this._errorAllQueries(error) + + if (!this._ending) { + // if the connection is ended without us calling .end() + // on this client then we have an unexpected disconnection + // treat this as an error unless we've already emitted an error + // during connection. + if (this._connecting && !this._connectionError) { + if (this._connectionCallback) { + this._connectionCallback(error) + } else { + this._handleErrorEvent(error) + } + } else if (!this._connectionError) { + this._handleErrorEvent(error) + } + } + + process.nextTick(() => { + this.emit('end') + }) + }) + } + + connect(callback) { + if (callback) { + this._connect(callback) + return + } + + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) + } + + _attachListeners(con) { + // password request handling + con.on('authenticationCleartextPassword', this._handleAuthCleartextPassword.bind(this)) + // password request handling + con.on('authenticationMD5Password', this._handleAuthMD5Password.bind(this)) + // password request handling (SASL) + con.on('authenticationSASL', this._handleAuthSASL.bind(this)) + con.on('authenticationSASLContinue', this._handleAuthSASLContinue.bind(this)) + con.on('authenticationSASLFinal', this._handleAuthSASLFinal.bind(this)) + con.on('backendKeyData', this._handleBackendKeyData.bind(this)) + con.on('error', this._handleErrorEvent.bind(this)) + con.on('errorMessage', this._handleErrorMessage.bind(this)) + con.on('readyForQuery', this._handleReadyForQuery.bind(this)) + con.on('notice', this._handleNotice.bind(this)) + con.on('rowDescription', this._handleRowDescription.bind(this)) + con.on('dataRow', this._handleDataRow.bind(this)) + con.on('portalSuspended', this._handlePortalSuspended.bind(this)) + con.on('emptyQuery', this._handleEmptyQuery.bind(this)) + 
con.on('commandComplete', this._handleCommandComplete.bind(this)) + con.on('parseComplete', this._handleParseComplete.bind(this)) + con.on('copyInResponse', this._handleCopyInResponse.bind(this)) + con.on('copyData', this._handleCopyData.bind(this)) + con.on('notification', this._handleNotification.bind(this)) + } + + // TODO(bmc): deprecate pgpass "built in" integration since this.password can be a function + // it can be supplied by the user if required - this is a breaking change! + _checkPgPass(cb) { + const con = this.connection + if (typeof this.password === 'function') { + this._Promise + .resolve() + .then(() => this.password()) + .then((pass) => { + if (pass !== undefined) { + if (typeof pass !== 'string') { + con.emit('error', new TypeError('Password must be a string')) + return + } + this.connectionParameters.password = this.password = pass + } else { + this.connectionParameters.password = this.password = null + } + cb() + }) + .catch((err) => { + con.emit('error', err) + }) + } else if (this.password !== null) { + cb() + } else { + pgPass(this.connectionParameters, (pass) => { + if (undefined !== pass) { + this.connectionParameters.password = this.password = pass + } + cb() + }) + } + } + + _handleAuthCleartextPassword(msg) { + this._checkPgPass(() => { + this.connection.password(this.password) + }) + } + + _handleAuthMD5Password(msg) { + this._checkPgPass(() => { + const hashedPassword = utils.postgresMd5PasswordHash(this.user, this.password, msg.salt) + this.connection.password(hashedPassword) + }) + } + + _handleAuthSASL(msg) { + this._checkPgPass(() => { + this.saslSession = sasl.startSession(msg.mechanisms) + this.connection.sendSASLInitialResponseMessage(this.saslSession.mechanism, this.saslSession.response) + }) + } + + _handleAuthSASLContinue(msg) { + sasl.continueSession(this.saslSession, this.password, msg.data) + this.connection.sendSCRAMClientFinalMessage(this.saslSession.response) + } + + _handleAuthSASLFinal(msg) { + 
sasl.finalizeSession(this.saslSession, msg.data) + this.saslSession = null + } + + _handleBackendKeyData(msg) { + this.processID = msg.processID + this.secretKey = msg.secretKey + } + + _handleReadyForQuery(msg) { + if (this._connecting) { + this._connecting = false + this._connected = true + clearTimeout(this.connectionTimeoutHandle) + + // process possible callback argument to Client#connect + if (this._connectionCallback) { + this._connectionCallback(null, this) + // remove callback for proper error handling + // after the connect event + this._connectionCallback = null + } + this.emit('connect') + } + const { activeQuery } = this + this.activeQuery = null + this.readyForQuery = true + if (activeQuery) { + activeQuery.handleReadyForQuery(this.connection) + } + this._pulseQueryQueue() + } + + // if we receieve an error event or error message + // during the connection process we handle it here + _handleErrorWhileConnecting(err) { + if (this._connectionError) { + // TODO(bmc): this is swallowing errors - we shouldn't do this + return + } + this._connectionError = true + clearTimeout(this.connectionTimeoutHandle) + if (this._connectionCallback) { + return this._connectionCallback(err) + } + this.emit('error', err) + } + + // if we're connected and we receive an error event from the connection + // this means the socket is dead - do a hard abort of all queries and emit + // the socket error on the client as well + _handleErrorEvent(err) { + if (this._connecting) { + return this._handleErrorWhileConnecting(err) + } + this._queryable = false + this._errorAllQueries(err) + this.emit('error', err) + } + + // handle error messages from the postgres backend + _handleErrorMessage(msg) { + if (this._connecting) { + return this._handleErrorWhileConnecting(msg) + } + const activeQuery = this.activeQuery + + if (!activeQuery) { + this._handleErrorEvent(msg) + return + } + + this.activeQuery = null + activeQuery.handleError(msg, this.connection) + } + + 
_handleRowDescription(msg) { + // delegate rowDescription to active query + this.activeQuery.handleRowDescription(msg) + } + + _handleDataRow(msg) { + // delegate dataRow to active query + this.activeQuery.handleDataRow(msg) + } + + _handlePortalSuspended(msg) { + // delegate portalSuspended to active query + this.activeQuery.handlePortalSuspended(this.connection) + } + + _handleEmptyQuery(msg) { + // delegate emptyQuery to active query + this.activeQuery.handleEmptyQuery(this.connection) + } + + _handleCommandComplete(msg) { + // delegate commandComplete to active query + this.activeQuery.handleCommandComplete(msg, this.connection) + } + + _handleParseComplete(msg) { + // if a prepared statement has a name and properly parses + // we track that its already been executed so we don't parse + // it again on the same client + if (this.activeQuery.name) { + this.connection.parsedStatements[this.activeQuery.name] = this.activeQuery.text + } + } + + _handleCopyInResponse(msg) { + this.activeQuery.handleCopyInResponse(this.connection) + } + + _handleCopyData(msg) { + this.activeQuery.handleCopyData(msg, this.connection) + } + + _handleNotification(msg) { + this.emit('notification', msg) + } + + _handleNotice(msg) { + this.emit('notice', msg) + } + + getStartupConf() { + var params = this.connectionParameters + + var data = { + user: params.user, + database: params.database, + } + + var appName = params.application_name || params.fallback_application_name + if (appName) { + data.application_name = appName + } + if (params.replication) { + data.replication = '' + params.replication + } + if (params.statement_timeout) { + data.statement_timeout = String(parseInt(params.statement_timeout, 10)) + } + if (params.idle_in_transaction_session_timeout) { + data.idle_in_transaction_session_timeout = String(parseInt(params.idle_in_transaction_session_timeout, 10)) + } + if (params.options) { + data.options = params.options + } + + return data + } + + cancel(client, query) { + if 
(client.activeQuery === query) { + var con = this.connection + + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' + this.port) + } else { + con.connect(this.port, this.host) + } + + // once connection is established send cancel message + con.on('connect', function () { + con.cancel(client.processID, client.secretKey) + }) + } else if (client.queryQueue.indexOf(query) !== -1) { + client.queryQueue.splice(client.queryQueue.indexOf(query), 1) + } + } + + setTypeParser(oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) + } + + getTypeParser(oid, format) { + return this._types.getTypeParser(oid, format) + } + + // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c + escapeIdentifier(str) { + return '"' + str.replace(/"/g, '""') + '"' + } + + // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c + escapeLiteral(str) { + var hasBackslash = false + var escaped = "'" + + for (var i = 0; i < str.length; i++) { + var c = str[i] + if (c === "'") { + escaped += c + c + } else if (c === '\\') { + escaped += c + c + hasBackslash = true + } else { + escaped += c + } + } + + escaped += "'" + + if (hasBackslash === true) { + escaped = ' E' + escaped + } + + return escaped + } + + _pulseQueryQueue() { + if (this.readyForQuery === true) { + this.activeQuery = this.queryQueue.shift() + if (this.activeQuery) { + this.readyForQuery = false + this.hasExecuted = true + + const queryError = this.activeQuery.submit(this.connection) + if (queryError) { + process.nextTick(() => { + this.activeQuery.handleError(queryError, this.connection) + this.readyForQuery = true + this._pulseQueryQueue() + }) + } + } else if (this.hasExecuted) { + this.activeQuery = null + this.emit('drain') + } + } + } + + query(config, values, callback) { + // can take in strings, config object or query object + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback + + if 
(config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + if (typeof values === 'function') { + query.callback = query.callback || values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new Query(config, values, callback) + if (!query.callback) { + result = new this._Promise((resolve, reject) => { + query.callback = (err, res) => (err ? reject(err) : resolve(res)) + }) + } + } + + if (readTimeout) { + queryCallback = query.callback + + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') + + process.nextTick(() => { + query.handleError(error, this.connection) + }) + + queryCallback(error) + + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} + + // Remove from queue + var index = this.queryQueue.indexOf(query) + if (index > -1) { + this.queryQueue.splice(index, 1) + } + + this._pulseQueryQueue() + }, readTimeout) + + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) + } + } + + if (this.binary && !query.binary) { + query.binary = true + } + + if (query._result && !query._result._types) { + query._result._types = this._types + } + + if (!this._queryable) { + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable'), this.connection) + }) + return result + } + + if (this._ending) { + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable'), this.connection) + }) + return result + } + + this.queryQueue.push(query) + this._pulseQueryQueue() + return result + } + + ref() { + this.connection.ref() + } + + unref() { + this.connection.unref() + } + + end(cb) { + this._ending = true + + // if we have never 
connected, then end is a noop, callback immediately + if (!this.connection._connecting) { + if (cb) { + cb() + } else { + return this._Promise.resolve() + } + } + + if (this.activeQuery || !this._queryable) { + // if we have an active query we need to force a disconnect + // on the socket - otherwise a hung query could block end forever + this.connection.stream.destroy() + } else { + this.connection.end() + } + + if (cb) { + this.connection.once('end', cb) + } else { + return new this._Promise((resolve) => { + this.connection.once('end', resolve) + }) + } + } +} + +// expose a Query constructor +Client.Query = Query + +module.exports = Client diff --git a/reverse_engineering/node_modules/pg/lib/connection-parameters.js b/reverse_engineering/node_modules/pg/lib/connection-parameters.js new file mode 100644 index 0000000..165e6d5 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/connection-parameters.js @@ -0,0 +1,166 @@ +'use strict' + +var dns = require('dns') + +var defaults = require('./defaults') + +var parse = require('pg-connection-string').parse // parses a connection string + +var val = function (key, config, envVar) { + if (envVar === undefined) { + envVar = process.env['PG' + key.toUpperCase()] + } else if (envVar === false) { + // do nothing ... 
use false + } else { + envVar = process.env[envVar] + } + + return config[key] || envVar || defaults[key] +} + +var readSSLConfigFromEnvironment = function () { + switch (process.env.PGSSLMODE) { + case 'disable': + return false + case 'prefer': + case 'require': + case 'verify-ca': + case 'verify-full': + return true + case 'no-verify': + return { rejectUnauthorized: false } + } + return defaults.ssl +} + +// Convert arg to a string, surround in single quotes, and escape single quotes and backslashes +var quoteParamValue = function (value) { + return "'" + ('' + value).replace(/\\/g, '\\\\').replace(/'/g, "\\'") + "'" +} + +var add = function (params, config, paramName) { + var value = config[paramName] + if (value !== undefined && value !== null) { + params.push(paramName + '=' + quoteParamValue(value)) + } +} + +class ConnectionParameters { + constructor(config) { + // if a string is passed, it is a raw connection string so we parse it into a config + config = typeof config === 'string' ? parse(config) : config || {} + + // if the config has a connectionString defined, parse IT into the config we use + // this will override other default values with what is stored in connectionString + if (config.connectionString) { + config = Object.assign({}, config, parse(config.connectionString)) + } + + this.user = val('user', config) + this.database = val('database', config) + + if (this.database === undefined) { + this.database = this.user + } + + this.port = parseInt(val('port', config), 10) + this.host = val('host', config) + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: val('password', config), + }) + + this.binary = val('binary', config) + this.options = val('options', config) + + this.ssl = typeof config.ssl === 'undefined' ? 
readSSLConfigFromEnvironment() : config.ssl + + if (typeof this.ssl === 'string') { + if (this.ssl === 'true') { + this.ssl = true + } + } + // support passing in ssl=no-verify via connection string + if (this.ssl === 'no-verify') { + this.ssl = { rejectUnauthorized: false } + } + if (this.ssl && this.ssl.key) { + Object.defineProperty(this.ssl, 'key', { + enumerable: false, + }) + } + + this.client_encoding = val('client_encoding', config) + this.replication = val('replication', config) + // a domain socket begins with '/' + this.isDomainSocket = !(this.host || '').indexOf('/') + + this.application_name = val('application_name', config, 'PGAPPNAME') + this.fallback_application_name = val('fallback_application_name', config, false) + this.statement_timeout = val('statement_timeout', config, false) + this.idle_in_transaction_session_timeout = val('idle_in_transaction_session_timeout', config, false) + this.query_timeout = val('query_timeout', config, false) + + if (config.connectionTimeoutMillis === undefined) { + this.connect_timeout = process.env.PGCONNECT_TIMEOUT || 0 + } else { + this.connect_timeout = Math.floor(config.connectionTimeoutMillis / 1000) + } + + if (config.keepAlive === false) { + this.keepalives = 0 + } else if (config.keepAlive === true) { + this.keepalives = 1 + } + + if (typeof config.keepAliveInitialDelayMillis === 'number') { + this.keepalives_idle = Math.floor(config.keepAliveInitialDelayMillis / 1000) + } + } + + getLibpqConnectionString(cb) { + var params = [] + add(params, this, 'user') + add(params, this, 'password') + add(params, this, 'port') + add(params, this, 'application_name') + add(params, this, 'fallback_application_name') + add(params, this, 'connect_timeout') + add(params, this, 'options') + + var ssl = typeof this.ssl === 'object' ? this.ssl : this.ssl ? 
{ sslmode: this.ssl } : {} + add(params, ssl, 'sslmode') + add(params, ssl, 'sslca') + add(params, ssl, 'sslkey') + add(params, ssl, 'sslcert') + add(params, ssl, 'sslrootcert') + + if (this.database) { + params.push('dbname=' + quoteParamValue(this.database)) + } + if (this.replication) { + params.push('replication=' + quoteParamValue(this.replication)) + } + if (this.host) { + params.push('host=' + quoteParamValue(this.host)) + } + if (this.isDomainSocket) { + return cb(null, params.join(' ')) + } + if (this.client_encoding) { + params.push('client_encoding=' + quoteParamValue(this.client_encoding)) + } + dns.lookup(this.host, function (err, address) { + if (err) return cb(err, null) + params.push('hostaddr=' + quoteParamValue(address)) + return cb(null, params.join(' ')) + }) + } +} + +module.exports = ConnectionParameters diff --git a/reverse_engineering/node_modules/pg/lib/connection.js b/reverse_engineering/node_modules/pg/lib/connection.js new file mode 100644 index 0000000..ebb2f09 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/connection.js @@ -0,0 +1,221 @@ +'use strict' + +var net = require('net') +var EventEmitter = require('events').EventEmitter + +const { parse, serialize } = require('pg-protocol') + +const flushBuffer = serialize.flush() +const syncBuffer = serialize.sync() +const endBuffer = serialize.end() + +// TODO(bmc) support binary mode at some point +class Connection extends EventEmitter { + constructor(config) { + super() + config = config || {} + this.stream = config.stream || new net.Socket() + this._keepAlive = config.keepAlive + this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis + this.lastBuffer = false + this.parsedStatements = {} + this.ssl = config.ssl || false + this._ending = false + this._emitMessage = false + var self = this + this.on('newListener', function (eventName) { + if (eventName === 'message') { + self._emitMessage = true + } + }) + } + + connect(port, host) { + var self = this + + 
this._connecting = true + this.stream.setNoDelay(true) + this.stream.connect(port, host) + + this.stream.once('connect', function () { + if (self._keepAlive) { + self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis) + } + self.emit('connect') + }) + + const reportStreamError = function (error) { + // errors about disconnections should be ignored during disconnect + if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) { + return + } + self.emit('error', error) + } + this.stream.on('error', reportStreamError) + + this.stream.on('close', function () { + self.emit('end') + }) + + if (!this.ssl) { + return this.attachListeners(this.stream) + } + + this.stream.once('data', function (buffer) { + var responseCode = buffer.toString('utf8') + switch (responseCode) { + case 'S': // Server supports SSL connections, continue with a secure connection + break + case 'N': // Server does not support SSL connections + self.stream.end() + return self.emit('error', new Error('The server does not support SSL connections')) + default: + // Any other response byte, including 'E' (ErrorResponse) indicating a server error + self.stream.end() + return self.emit('error', new Error('There was an error establishing an SSL connection')) + } + var tls = require('tls') + const options = { + socket: self.stream, + } + + if (self.ssl !== true) { + Object.assign(options, self.ssl) + + if ('key' in self.ssl) { + options.key = self.ssl.key + } + } + + if (net.isIP(host) === 0) { + options.servername = host + } + try { + self.stream = tls.connect(options) + } catch (err) { + return self.emit('error', err) + } + self.attachListeners(self.stream) + self.stream.on('error', reportStreamError) + + self.emit('sslconnect') + }) + } + + attachListeners(stream) { + stream.on('end', () => { + this.emit('end') + }) + parse(stream, (msg) => { + var eventName = msg.name === 'error' ? 
'errorMessage' : msg.name + if (this._emitMessage) { + this.emit('message', msg) + } + this.emit(eventName, msg) + }) + } + + requestSsl() { + this.stream.write(serialize.requestSsl()) + } + + startup(config) { + this.stream.write(serialize.startup(config)) + } + + cancel(processID, secretKey) { + this._send(serialize.cancel(processID, secretKey)) + } + + password(password) { + this._send(serialize.password(password)) + } + + sendSASLInitialResponseMessage(mechanism, initialResponse) { + this._send(serialize.sendSASLInitialResponseMessage(mechanism, initialResponse)) + } + + sendSCRAMClientFinalMessage(additionalData) { + this._send(serialize.sendSCRAMClientFinalMessage(additionalData)) + } + + _send(buffer) { + if (!this.stream.writable) { + return false + } + return this.stream.write(buffer) + } + + query(text) { + this._send(serialize.query(text)) + } + + // send parse message + parse(query) { + this._send(serialize.parse(query)) + } + + // send bind message + bind(config) { + this._send(serialize.bind(config)) + } + + // send execute message + execute(config) { + this._send(serialize.execute(config)) + } + + flush() { + if (this.stream.writable) { + this.stream.write(flushBuffer) + } + } + + sync() { + this._ending = true + this._send(flushBuffer) + this._send(syncBuffer) + } + + ref() { + this.stream.ref() + } + + unref() { + this.stream.unref() + } + + end() { + // 0x58 = 'X' + this._ending = true + if (!this._connecting || !this.stream.writable) { + this.stream.end() + return + } + return this.stream.write(endBuffer, () => { + this.stream.end() + }) + } + + close(msg) { + this._send(serialize.close(msg)) + } + + describe(msg) { + this._send(serialize.describe(msg)) + } + + sendCopyFromChunk(chunk) { + this._send(serialize.copyData(chunk)) + } + + endCopyFrom() { + this._send(serialize.copyDone()) + } + + sendCopyFail(msg) { + this._send(serialize.copyFail(msg)) + } +} + +module.exports = Connection diff --git 
a/reverse_engineering/node_modules/pg/lib/defaults.js b/reverse_engineering/node_modules/pg/lib/defaults.js new file mode 100644 index 0000000..9384e01 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/defaults.js @@ -0,0 +1,80 @@ +'use strict' + +module.exports = { + // database host. defaults to localhost + host: 'localhost', + + // database user's name + user: process.platform === 'win32' ? process.env.USERNAME : process.env.USER, + + // name of database to connect + database: undefined, + + // database user's password + password: null, + + // a Postgres connection string to be used instead of setting individual connection items + // NOTE: Setting this value will cause it to override any other value (such as database or user) defined + // in the defaults object. + connectionString: undefined, + + // database port + port: 5432, + + // number of rows to return at a time from a prepared statement's + // portal. 0 will return all rows at once + rows: 0, + + // binary result mode + binary: false, + + // Connection pool options - see https://github.com/brianc/node-pg-pool + + // number of connections to use in connection pool + // 0 will disable connection pooling + max: 10, + + // max milliseconds a client can go unused before it is removed + // from the pool and destroyed + idleTimeoutMillis: 30000, + + client_encoding: '', + + ssl: false, + + application_name: undefined, + + fallback_application_name: undefined, + + options: undefined, + + parseInputDatesAsUTC: false, + + // max milliseconds any query using this connection will execute for before timing out in error. 
+ // false=unlimited + statement_timeout: false, + + // Terminate any session with an open transaction that has been idle for longer than the specified duration in milliseconds + // false=unlimited + idle_in_transaction_session_timeout: false, + + // max milliseconds to wait for query to complete (client side) + query_timeout: false, + + connect_timeout: 0, + + keepalives: 1, + + keepalives_idle: 0, +} + +var pgTypes = require('pg-types') +// save default parsers +var parseBigInteger = pgTypes.getTypeParser(20, 'text') +var parseBigIntegerArray = pgTypes.getTypeParser(1016, 'text') + +// parse int8 so you can get your count values as actual numbers +module.exports.__defineSetter__('parseInt8', function (val) { + pgTypes.setTypeParser(20, 'text', val ? pgTypes.getTypeParser(23, 'text') : parseBigInteger) + pgTypes.setTypeParser(1016, 'text', val ? pgTypes.getTypeParser(1007, 'text') : parseBigIntegerArray) +}) diff --git a/reverse_engineering/node_modules/pg/lib/index.js b/reverse_engineering/node_modules/pg/lib/index.js new file mode 100644 index 0000000..7f02aba --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/index.js @@ -0,0 +1,55 @@ +'use strict' + +var Client = require('./client') +var defaults = require('./defaults') +var Connection = require('./connection') +var Pool = require('pg-pool') +const { DatabaseError } = require('pg-protocol') + +const poolFactory = (Client) => { + return class BoundPool extends Pool { + constructor(options) { + super(options, Client) + } + } +} + +var PG = function (clientConstructor) { + this.defaults = defaults + this.Client = clientConstructor + this.Query = this.Client.Query + this.Pool = poolFactory(this.Client) + this._pools = [] + this.Connection = Connection + this.types = require('pg-types') + this.DatabaseError = DatabaseError +} + +if (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') { + module.exports = new PG(require('./native')) +} else { + module.exports = new PG(Client) + + // lazy require native 
module...the native module may not have installed + Object.defineProperty(module.exports, 'native', { + configurable: true, + enumerable: false, + get() { + var native = null + try { + native = new PG(require('./native')) + } catch (err) { + if (err.code !== 'MODULE_NOT_FOUND') { + throw err + } + } + + // overwrite module.exports.native so that getter is never called again + Object.defineProperty(module.exports, 'native', { + value: native, + }) + + return native + }, + }) +} diff --git a/reverse_engineering/node_modules/pg/lib/native/client.js b/reverse_engineering/node_modules/pg/lib/native/client.js new file mode 100644 index 0000000..d1faeb3 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/native/client.js @@ -0,0 +1,297 @@ +'use strict' + +// eslint-disable-next-line +var Native = require('pg-native') +var TypeOverrides = require('../type-overrides') +var pkg = require('../../package.json') +var EventEmitter = require('events').EventEmitter +var util = require('util') +var ConnectionParameters = require('../connection-parameters') + +var NativeQuery = require('./query') + +var Client = (module.exports = function (config) { + EventEmitter.call(this) + config = config || {} + + this._Promise = config.Promise || global.Promise + this._types = new TypeOverrides(config.types) + + this.native = new Native({ + types: this._types, + }) + + this._queryQueue = [] + this._ending = false + this._connecting = false + this._connected = false + this._queryable = true + + // keep these on the object for legacy reasons + // for the time being. 
TODO: deprecate all this jazz + var cp = (this.connectionParameters = new ConnectionParameters(config)) + this.user = cp.user + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: cp.password, + }) + this.database = cp.database + this.host = cp.host + this.port = cp.port + + // a hash to hold named queries + this.namedQueries = {} +}) + +Client.Query = NativeQuery + +util.inherits(Client, EventEmitter) + +Client.prototype._errorAllQueries = function (err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.native = this.native + query.handleError(err) + }) + } + + if (this._hasActiveQuery()) { + enqueueError(this._activeQuery) + this._activeQuery = null + } + + this._queryQueue.forEach(enqueueError) + this._queryQueue.length = 0 +} + +// connect to the backend +// pass an optional callback to be called once connected +// or with an error if there was a connection error +Client.prototype._connect = function (cb) { + var self = this + + if (this._connecting) { + process.nextTick(() => cb(new Error('Client has already been connected. 
You cannot reuse a client.'))) + return + } + + this._connecting = true + + this.connectionParameters.getLibpqConnectionString(function (err, conString) { + if (err) return cb(err) + self.native.connect(conString, function (err) { + if (err) { + self.native.end() + return cb(err) + } + + // set internal states to connected + self._connected = true + + // handle connection errors from the native layer + self.native.on('error', function (err) { + self._queryable = false + self._errorAllQueries(err) + self.emit('error', err) + }) + + self.native.on('notification', function (msg) { + self.emit('notification', { + channel: msg.relname, + payload: msg.extra, + }) + }) + + // signal we are connected now + self.emit('connect') + self._pulseQueryQueue(true) + + cb() + }) + }) +} + +Client.prototype.connect = function (callback) { + if (callback) { + this._connect(callback) + return + } + + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) +} + +// send a query to the server +// this method is highly overloaded to take +// 1) string query, optional array of parameters, optional function callback +// 2) object query with { +// string query +// optional array values, +// optional function callback instead of as a separate parameter +// optional string name to name & cache the query plan +// optional string rowMode = 'array' for an array of results +// } +Client.prototype.query = function (config, values, callback) { + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback + + if (config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + // accept query(new Query(...), (err, res) => { }) style + if (typeof values === 'function') { + 
config.callback = values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new NativeQuery(config, values, callback) + if (!query.callback) { + let resolveOut, rejectOut + result = new this._Promise((resolve, reject) => { + resolveOut = resolve + rejectOut = reject + }) + query.callback = (err, res) => (err ? rejectOut(err) : resolveOut(res)) + } + } + + if (readTimeout) { + queryCallback = query.callback + + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') + + process.nextTick(() => { + query.handleError(error, this.connection) + }) + + queryCallback(error) + + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} + + // Remove from queue + var index = this._queryQueue.indexOf(query) + if (index > -1) { + this._queryQueue.splice(index, 1) + } + + this._pulseQueryQueue() + }, readTimeout) + + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) + } + } + + if (!this._queryable) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable')) + }) + return result + } + + if (this._ending) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable')) + }) + return result + } + + this._queryQueue.push(query) + this._pulseQueryQueue() + return result +} + +// disconnect from the backend server +Client.prototype.end = function (cb) { + var self = this + + this._ending = true + + if (!this._connected) { + this.once('connect', this.end.bind(this, cb)) + } + var result + if (!cb) { + result = new this._Promise(function (resolve, reject) { + cb = (err) => (err ? 
reject(err) : resolve()) + }) + } + this.native.end(function () { + self._errorAllQueries(new Error('Connection terminated')) + + process.nextTick(() => { + self.emit('end') + if (cb) cb() + }) + }) + return result +} + +Client.prototype._hasActiveQuery = function () { + return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end' +} + +Client.prototype._pulseQueryQueue = function (initialConnection) { + if (!this._connected) { + return + } + if (this._hasActiveQuery()) { + return + } + var query = this._queryQueue.shift() + if (!query) { + if (!initialConnection) { + this.emit('drain') + } + return + } + this._activeQuery = query + query.submit(this) + var self = this + query.once('_done', function () { + self._pulseQueryQueue() + }) +} + +// attempt to cancel an in-progress query +Client.prototype.cancel = function (query) { + if (this._activeQuery === query) { + this.native.cancel(function () {}) + } else if (this._queryQueue.indexOf(query) !== -1) { + this._queryQueue.splice(this._queryQueue.indexOf(query), 1) + } +} + +Client.prototype.ref = function () {} +Client.prototype.unref = function () {} + +Client.prototype.setTypeParser = function (oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) +} + +Client.prototype.getTypeParser = function (oid, format) { + return this._types.getTypeParser(oid, format) +} diff --git a/reverse_engineering/node_modules/pg/lib/native/index.js b/reverse_engineering/node_modules/pg/lib/native/index.js new file mode 100644 index 0000000..eead422 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/native/index.js @@ -0,0 +1,2 @@ +'use strict' +module.exports = require('./client') diff --git a/reverse_engineering/node_modules/pg/lib/native/query.js b/reverse_engineering/node_modules/pg/lib/native/query.js new file mode 100644 index 0000000..d06db43 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/native/query.js @@ -0,0 +1,165 @@ +'use strict' + 
+var EventEmitter = require('events').EventEmitter +var util = require('util') +var utils = require('../utils') + +var NativeQuery = (module.exports = function (config, values, callback) { + EventEmitter.call(this) + config = utils.normalizeQueryConfig(config, values, callback) + this.text = config.text + this.values = config.values + this.name = config.name + this.callback = config.callback + this.state = 'new' + this._arrayMode = config.rowMode === 'array' + + // if the 'row' event is listened for + // then emit them as they come in + // without setting singleRowMode to true + // this has almost no meaning because libpq + // reads all rows into memory befor returning any + this._emitRowEvents = false + this.on( + 'newListener', + function (event) { + if (event === 'row') this._emitRowEvents = true + }.bind(this) + ) +}) + +util.inherits(NativeQuery, EventEmitter) + +var errorFieldMap = { + /* eslint-disable quote-props */ + sqlState: 'code', + statementPosition: 'position', + messagePrimary: 'message', + context: 'where', + schemaName: 'schema', + tableName: 'table', + columnName: 'column', + dataTypeName: 'dataType', + constraintName: 'constraint', + sourceFile: 'file', + sourceLine: 'line', + sourceFunction: 'routine', +} + +NativeQuery.prototype.handleError = function (err) { + // copy pq error fields into the error object + var fields = this.native.pq.resultErrorFields() + if (fields) { + for (var key in fields) { + var normalizedFieldName = errorFieldMap[key] || key + err[normalizedFieldName] = fields[key] + } + } + if (this.callback) { + this.callback(err) + } else { + this.emit('error', err) + } + this.state = 'error' +} + +NativeQuery.prototype.then = function (onSuccess, onFailure) { + return this._getPromise().then(onSuccess, onFailure) +} + +NativeQuery.prototype.catch = function (callback) { + return this._getPromise().catch(callback) +} + +NativeQuery.prototype._getPromise = function () { + if (this._promise) return this._promise + this._promise = 
new Promise( + function (resolve, reject) { + this._once('end', resolve) + this._once('error', reject) + }.bind(this) + ) + return this._promise +} + +NativeQuery.prototype.submit = function (client) { + this.state = 'running' + var self = this + this.native = client.native + client.native.arrayMode = this._arrayMode + + var after = function (err, rows, results) { + client.native.arrayMode = false + setImmediate(function () { + self.emit('_done') + }) + + // handle possible query error + if (err) { + return self.handleError(err) + } + + // emit row events for each row in the result + if (self._emitRowEvents) { + if (results.length > 1) { + rows.forEach((rowOfRows, i) => { + rowOfRows.forEach((row) => { + self.emit('row', row, results[i]) + }) + }) + } else { + rows.forEach(function (row) { + self.emit('row', row, results) + }) + } + } + + // handle successful result + self.state = 'end' + self.emit('end', results) + if (self.callback) { + self.callback(null, results) + } + } + + if (process.domain) { + after = process.domain.bind(after) + } + + // named query + if (this.name) { + if (this.name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', this.name, this.name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ + } + var values = (this.values || []).map(utils.prepareValue) + + // check if the client has already executed this named query + // if so...just execute it again - skip the planning phase + if (client.namedQueries[this.name]) { + if (this.text && client.namedQueries[this.name] !== this.text) { + const err = new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) + return after(err) + } + return client.native.execute(this.name, values, after) + } + // plan the named query the first time, then execute it + return client.native.prepare(this.name, this.text, values.length, function (err) { + if (err) return after(err) + client.namedQueries[self.name] = self.text + return self.native.execute(self.name, values, after) + }) + } else if (this.values) { + if (!Array.isArray(this.values)) { + const err = new Error('Query values must be an array') + return after(err) + } + var vals = this.values.map(utils.prepareValue) + client.native.query(this.text, vals, after) + } else { + client.native.query(this.text, after) + } +} diff --git a/reverse_engineering/node_modules/pg/lib/query.js b/reverse_engineering/node_modules/pg/lib/query.js new file mode 100644 index 0000000..c0dfedd --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/query.js @@ -0,0 +1,234 @@ +'use strict' + +const { EventEmitter } = require('events') + +const Result = require('./result') +const utils = require('./utils') + +class Query extends EventEmitter { + constructor(config, values, callback) { + super() + + config = utils.normalizeQueryConfig(config, values, callback) + + this.text = config.text + this.values = config.values + this.rows = config.rows + this.types = config.types + this.name = config.name + this.binary = 
config.binary + // use unique portal name each time + this.portal = config.portal || '' + this.callback = config.callback + this._rowMode = config.rowMode + if (process.domain && config.callback) { + this.callback = process.domain.bind(config.callback) + } + this._result = new Result(this._rowMode, this.types) + + // potential for multiple results + this._results = this._result + this.isPreparedStatement = false + this._canceledDueToError = false + this._promise = null + } + + requiresPreparation() { + // named queries must always be prepared + if (this.name) { + return true + } + // always prepare if there are max number of rows expected per + // portal execution + if (this.rows) { + return true + } + // don't prepare empty text queries + if (!this.text) { + return false + } + // prepare if there are values + if (!this.values) { + return false + } + return this.values.length > 0 + } + + _checkForMultirow() { + // if we already have a result with a command property + // then we've already executed one query in a multi-statement simple query + // turn our results into an array of results + if (this._result.command) { + if (!Array.isArray(this._results)) { + this._results = [this._result] + } + this._result = new Result(this._rowMode, this.types) + this._results.push(this._result) + } + } + + // associates row metadata from the supplied + // message with this query object + // metadata used when parsing row results + handleRowDescription(msg) { + this._checkForMultirow() + this._result.addFields(msg.fields) + this._accumulateRows = this.callback || !this.listeners('row').length + } + + handleDataRow(msg) { + let row + + if (this._canceledDueToError) { + return + } + + try { + row = this._result.parseRow(msg.fields) + } catch (err) { + this._canceledDueToError = err + return + } + + this.emit('row', row, this._result) + if (this._accumulateRows) { + this._result.addRow(row) + } + } + + handleCommandComplete(msg, connection) { + this._checkForMultirow() + 
this._result.addCommandComplete(msg) + // need to sync after each command complete of a prepared statement + // if we were using a row count which results in multiple calls to _getRows + if (this.rows) { + connection.sync() + } + } + + // if a named prepared statement is created with empty query text + // the backend will send an emptyQuery message but *not* a command complete message + // since we pipeline sync immediately after execute we don't need to do anything here + // unless we have rows specified, in which case we did not pipeline the intial sync call + handleEmptyQuery(connection) { + if (this.rows) { + connection.sync() + } + } + + handleError(err, connection) { + // need to sync after error during a prepared statement + if (this._canceledDueToError) { + err = this._canceledDueToError + this._canceledDueToError = false + } + // if callback supplied do not emit error event as uncaught error + // events will bubble up to node process + if (this.callback) { + return this.callback(err) + } + this.emit('error', err) + } + + handleReadyForQuery(con) { + if (this._canceledDueToError) { + return this.handleError(this._canceledDueToError, con) + } + if (this.callback) { + this.callback(null, this._results) + } + this.emit('end', this._results) + } + + submit(connection) { + if (typeof this.text !== 'string' && typeof this.name !== 'string') { + return new Error('A query must have either text or a name. 
Supplying neither is unsupported.') + } + const previous = connection.parsedStatements[this.name] + if (this.text && previous && this.text !== previous) { + return new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) + } + if (this.values && !Array.isArray(this.values)) { + return new Error('Query values must be an array') + } + if (this.requiresPreparation()) { + this.prepare(connection) + } else { + connection.query(this.text) + } + return null + } + + hasBeenParsed(connection) { + return this.name && connection.parsedStatements[this.name] + } + + handlePortalSuspended(connection) { + this._getRows(connection, this.rows) + } + + _getRows(connection, rows) { + connection.execute({ + portal: this.portal, + rows: rows, + }) + // if we're not reading pages of rows send the sync command + // to indicate the pipeline is finished + if (!rows) { + connection.sync() + } else { + // otherwise flush the call out to read more rows + connection.flush() + } + } + + // http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY + prepare(connection) { + // prepared statements need sync to be called after each command + // complete or when an error is encountered + this.isPreparedStatement = true + + // TODO refactor this poor encapsulation + if (!this.hasBeenParsed(connection)) { + connection.parse({ + text: this.text, + name: this.name, + types: this.types, + }) + } + + // because we're mapping user supplied values to + // postgres wire protocol compatible values it could + // throw an exception, so try/catch this section + try { + connection.bind({ + portal: this.portal, + statement: this.name, + values: this.values, + binary: this.binary, + valueMapper: utils.prepareValue, + }) + } catch (err) { + this.handleError(err, connection) + return + } + + connection.describe({ + type: 'P', + name: this.portal || '', + }) + + this._getRows(connection, this.rows) + } + + handleCopyInResponse(connection) { + 
connection.sendCopyFail('No source stream defined') + } + + // eslint-disable-next-line no-unused-vars + handleCopyData(msg, connection) { + // noop + } +} + +module.exports = Query diff --git a/reverse_engineering/node_modules/pg/lib/result.js b/reverse_engineering/node_modules/pg/lib/result.js new file mode 100644 index 0000000..3506097 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/result.js @@ -0,0 +1,100 @@ +'use strict' + +var types = require('pg-types') + +var matchRegexp = /^([A-Za-z]+)(?: (\d+))?(?: (\d+))?/ + +// result object returned from query +// in the 'end' event and also +// passed as second argument to provided callback +class Result { + constructor(rowMode, types) { + this.command = null + this.rowCount = null + this.oid = null + this.rows = [] + this.fields = [] + this._parsers = undefined + this._types = types + this.RowCtor = null + this.rowAsArray = rowMode === 'array' + if (this.rowAsArray) { + this.parseRow = this._parseRowAsArray + } + } + + // adds a command complete message + addCommandComplete(msg) { + var match + if (msg.text) { + // pure javascript + match = matchRegexp.exec(msg.text) + } else { + // native bindings + match = matchRegexp.exec(msg.command) + } + if (match) { + this.command = match[1] + if (match[3]) { + // COMMMAND OID ROWS + this.oid = parseInt(match[2], 10) + this.rowCount = parseInt(match[3], 10) + } else if (match[2]) { + // COMMAND ROWS + this.rowCount = parseInt(match[2], 10) + } + } + } + + _parseRowAsArray(rowData) { + var row = new Array(rowData.length) + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + if (rawValue !== null) { + row[i] = this._parsers[i](rawValue) + } else { + row[i] = null + } + } + return row + } + + parseRow(rowData) { + var row = {} + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + var field = this.fields[i].name + if (rawValue !== null) { + row[field] = this._parsers[i](rawValue) + } else { + row[field] = 
null + } + } + return row + } + + addRow(row) { + this.rows.push(row) + } + + addFields(fieldDescriptions) { + // clears field definitions + // multiple query statements in 1 action can result in multiple sets + // of rowDescriptions...eg: 'select NOW(); select 1::int;' + // you need to reset the fields + this.fields = fieldDescriptions + if (this.fields.length) { + this._parsers = new Array(fieldDescriptions.length) + } + for (var i = 0; i < fieldDescriptions.length; i++) { + var desc = fieldDescriptions[i] + if (this._types) { + this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } else { + this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } + } + } +} + +module.exports = Result diff --git a/reverse_engineering/node_modules/pg/lib/sasl.js b/reverse_engineering/node_modules/pg/lib/sasl.js new file mode 100644 index 0000000..c618047 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/sasl.js @@ -0,0 +1,209 @@ +'use strict' +const crypto = require('crypto') + +function startSession(mechanisms) { + if (mechanisms.indexOf('SCRAM-SHA-256') === -1) { + throw new Error('SASL: Only mechanism SCRAM-SHA-256 is currently supported') + } + + const clientNonce = crypto.randomBytes(18).toString('base64') + + return { + mechanism: 'SCRAM-SHA-256', + clientNonce, + response: 'n,,n=*,r=' + clientNonce, + message: 'SASLInitialResponse', + } +} + +function continueSession(session, password, serverData) { + if (session.message !== 'SASLInitialResponse') { + throw new Error('SASL: Last message was not SASLInitialResponse') + } + if (typeof password !== 'string') { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a string') + } + if (typeof serverData !== 'string') { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: serverData must be a string') + } + + const sv = parseServerFirstMessage(serverData) + + if (!sv.nonce.startsWith(session.clientNonce)) { + throw new Error('SASL: 
SCRAM-SERVER-FIRST-MESSAGE: server nonce does not start with client nonce') + } else if (sv.nonce.length === session.clientNonce.length) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce is too short') + } + + var saltBytes = Buffer.from(sv.salt, 'base64') + + var saltedPassword = Hi(password, saltBytes, sv.iteration) + + var clientKey = hmacSha256(saltedPassword, 'Client Key') + var storedKey = sha256(clientKey) + + var clientFirstMessageBare = 'n=*,r=' + session.clientNonce + var serverFirstMessage = 'r=' + sv.nonce + ',s=' + sv.salt + ',i=' + sv.iteration + + var clientFinalMessageWithoutProof = 'c=biws,r=' + sv.nonce + + var authMessage = clientFirstMessageBare + ',' + serverFirstMessage + ',' + clientFinalMessageWithoutProof + + var clientSignature = hmacSha256(storedKey, authMessage) + var clientProofBytes = xorBuffers(clientKey, clientSignature) + var clientProof = clientProofBytes.toString('base64') + + var serverKey = hmacSha256(saltedPassword, 'Server Key') + var serverSignatureBytes = hmacSha256(serverKey, authMessage) + + session.message = 'SASLResponse' + session.serverSignature = serverSignatureBytes.toString('base64') + session.response = clientFinalMessageWithoutProof + ',p=' + clientProof +} + +function finalizeSession(session, serverData) { + if (session.message !== 'SASLResponse') { + throw new Error('SASL: Last message was not SASLResponse') + } + if (typeof serverData !== 'string') { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: serverData must be a string') + } + + const { serverSignature } = parseServerFinalMessage(serverData) + + if (serverSignature !== session.serverSignature) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature does not match') + } +} + +/** + * printable = %x21-2B / %x2D-7E + * ;; Printable ASCII except ",". + * ;; Note that any "printable" is also + * ;; a valid "value". 
+ */ +function isPrintableChars(text) { + if (typeof text !== 'string') { + throw new TypeError('SASL: text must be a string') + } + return text + .split('') + .map((_, i) => text.charCodeAt(i)) + .every((c) => (c >= 0x21 && c <= 0x2b) || (c >= 0x2d && c <= 0x7e)) +} + +/** + * base64-char = ALPHA / DIGIT / "/" / "+" + * + * base64-4 = 4base64-char + * + * base64-3 = 3base64-char "=" + * + * base64-2 = 2base64-char "==" + * + * base64 = *base64-4 [base64-3 / base64-2] + */ +function isBase64(text) { + return /^(?:[a-zA-Z0-9+/]{4})*(?:[a-zA-Z0-9+/]{2}==|[a-zA-Z0-9+/]{3}=)?$/.test(text) +} + +function parseAttributePairs(text) { + if (typeof text !== 'string') { + throw new TypeError('SASL: attribute pairs text must be a string') + } + + return new Map( + text.split(',').map((attrValue) => { + if (!/^.=/.test(attrValue)) { + throw new Error('SASL: Invalid attribute pair entry') + } + const name = attrValue[0] + const value = attrValue.substring(2) + return [name, value] + }) + ) +} + +function parseServerFirstMessage(data) { + const attrPairs = parseAttributePairs(data) + + const nonce = attrPairs.get('r') + if (!nonce) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce missing') + } else if (!isPrintableChars(nonce)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce must only contain printable characters') + } + const salt = attrPairs.get('s') + if (!salt) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt missing') + } else if (!isBase64(salt)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt must be base64') + } + const iterationText = attrPairs.get('i') + if (!iterationText) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: iteration missing') + } else if (!/^[1-9][0-9]*$/.test(iterationText)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: invalid iteration count') + } + const iteration = parseInt(iterationText, 10) + + return { + nonce, + salt, + iteration, + } +} + +function 
parseServerFinalMessage(serverData) { + const attrPairs = parseAttributePairs(serverData) + const serverSignature = attrPairs.get('v') + if (!serverSignature) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature is missing') + } else if (!isBase64(serverSignature)) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature must be base64') + } + return { + serverSignature, + } +} + +function xorBuffers(a, b) { + if (!Buffer.isBuffer(a)) { + throw new TypeError('first argument must be a Buffer') + } + if (!Buffer.isBuffer(b)) { + throw new TypeError('second argument must be a Buffer') + } + if (a.length !== b.length) { + throw new Error('Buffer lengths must match') + } + if (a.length === 0) { + throw new Error('Buffers cannot be empty') + } + return Buffer.from(a.map((_, i) => a[i] ^ b[i])) +} + +function sha256(text) { + return crypto.createHash('sha256').update(text).digest() +} + +function hmacSha256(key, msg) { + return crypto.createHmac('sha256', key).update(msg).digest() +} + +function Hi(password, saltBytes, iterations) { + var ui1 = hmacSha256(password, Buffer.concat([saltBytes, Buffer.from([0, 0, 0, 1])])) + var ui = ui1 + for (var i = 0; i < iterations - 1; i++) { + ui1 = hmacSha256(password, ui1) + ui = xorBuffers(ui, ui1) + } + + return ui +} + +module.exports = { + startSession, + continueSession, + finalizeSession, +} diff --git a/reverse_engineering/node_modules/pg/lib/type-overrides.js b/reverse_engineering/node_modules/pg/lib/type-overrides.js new file mode 100644 index 0000000..6669348 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/type-overrides.js @@ -0,0 +1,35 @@ +'use strict' + +var types = require('pg-types') + +function TypeOverrides(userTypes) { + this._types = userTypes || types + this.text = {} + this.binary = {} +} + +TypeOverrides.prototype.getOverrides = function (format) { + switch (format) { + case 'text': + return this.text + case 'binary': + return this.binary + default: + return {} + } 
+} + +TypeOverrides.prototype.setTypeParser = function (oid, format, parseFn) { + if (typeof format === 'function') { + parseFn = format + format = 'text' + } + this.getOverrides(format)[oid] = parseFn +} + +TypeOverrides.prototype.getTypeParser = function (oid, format) { + format = format || 'text' + return this.getOverrides(format)[oid] || this._types.getTypeParser(oid, format) +} + +module.exports = TypeOverrides diff --git a/reverse_engineering/node_modules/pg/lib/utils.js b/reverse_engineering/node_modules/pg/lib/utils.js new file mode 100644 index 0000000..d63fe68 --- /dev/null +++ b/reverse_engineering/node_modules/pg/lib/utils.js @@ -0,0 +1,187 @@ +'use strict' + +const crypto = require('crypto') + +const defaults = require('./defaults') + +function escapeElement(elementRepresentation) { + var escaped = elementRepresentation.replace(/\\/g, '\\\\').replace(/"/g, '\\"') + + return '"' + escaped + '"' +} + +// convert a JS array to a postgres array literal +// uses comma separator so won't work for types like box that use +// a different array separator. +function arrayString(val) { + var result = '{' + for (var i = 0; i < val.length; i++) { + if (i > 0) { + result = result + ',' + } + if (val[i] === null || typeof val[i] === 'undefined') { + result = result + 'NULL' + } else if (Array.isArray(val[i])) { + result = result + arrayString(val[i]) + } else if (val[i] instanceof Buffer) { + result += '\\\\x' + val[i].toString('hex') + } else { + result += escapeElement(prepareValue(val[i])) + } + } + result = result + '}' + return result +} + +// converts values from javascript types +// to their 'raw' counterparts for use as a postgres parameter +// note: you can override this function to provide your own conversion mechanism +// for complex types, etc... 
+var prepareValue = function (val, seen) { + // null and undefined are both null for postgres + if (val == null) { + return null + } + if (val instanceof Buffer) { + return val + } + if (ArrayBuffer.isView(val)) { + var buf = Buffer.from(val.buffer, val.byteOffset, val.byteLength) + if (buf.length === val.byteLength) { + return buf + } + return buf.slice(val.byteOffset, val.byteOffset + val.byteLength) // Node.js v4 does not support those Buffer.from params + } + if (val instanceof Date) { + if (defaults.parseInputDatesAsUTC) { + return dateToStringUTC(val) + } else { + return dateToString(val) + } + } + if (Array.isArray(val)) { + return arrayString(val) + } + if (typeof val === 'object') { + return prepareObject(val, seen) + } + return val.toString() +} + +function prepareObject(val, seen) { + if (val && typeof val.toPostgres === 'function') { + seen = seen || [] + if (seen.indexOf(val) !== -1) { + throw new Error('circular reference detected while preparing "' + val + '" for query') + } + seen.push(val) + + return prepareValue(val.toPostgres(prepareValue), seen) + } + return JSON.stringify(val) +} + +function pad(number, digits) { + number = '' + number + while (number.length < digits) { + number = '0' + number + } + return number +} + +function dateToString(date) { + var offset = -date.getTimezoneOffset() + + var year = date.getFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + + var ret = + pad(year, 4) + + '-' + + pad(date.getMonth() + 1, 2) + + '-' + + pad(date.getDate(), 2) + + 'T' + + pad(date.getHours(), 2) + + ':' + + pad(date.getMinutes(), 2) + + ':' + + pad(date.getSeconds(), 2) + + '.' 
+ + pad(date.getMilliseconds(), 3) + + if (offset < 0) { + ret += '-' + offset *= -1 + } else { + ret += '+' + } + + ret += pad(Math.floor(offset / 60), 2) + ':' + pad(offset % 60, 2) + if (isBCYear) ret += ' BC' + return ret +} + +function dateToStringUTC(date) { + var year = date.getUTCFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + + var ret = + pad(year, 4) + + '-' + + pad(date.getUTCMonth() + 1, 2) + + '-' + + pad(date.getUTCDate(), 2) + + 'T' + + pad(date.getUTCHours(), 2) + + ':' + + pad(date.getUTCMinutes(), 2) + + ':' + + pad(date.getUTCSeconds(), 2) + + '.' + + pad(date.getUTCMilliseconds(), 3) + + ret += '+00:00' + if (isBCYear) ret += ' BC' + return ret +} + +function normalizeQueryConfig(config, values, callback) { + // can take in strings or config objects + config = typeof config === 'string' ? { text: config } : config + if (values) { + if (typeof values === 'function') { + config.callback = values + } else { + config.values = values + } + } + if (callback) { + config.callback = callback + } + return config +} + +const md5 = function (string) { + return crypto.createHash('md5').update(string, 'utf-8').digest('hex') +} + +// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html +const postgresMd5PasswordHash = function (user, password, salt) { + var inner = md5(password + user) + var outer = md5(Buffer.concat([Buffer.from(inner), salt])) + return 'md5' + outer +} + +module.exports = { + prepareValue: function prepareValueWrapper(value) { + // this ensures that extra arguments do not get passed into prepareValue + // by accident, eg: from calling values.map(utils.prepareValue) + return prepareValue(value) + }, + normalizeQueryConfig, + postgresMd5PasswordHash, + md5, +} diff --git a/reverse_engineering/node_modules/pg/package.json b/reverse_engineering/node_modules/pg/package.json new file mode 100644 index 0000000..6c532d5 
--- /dev/null +++ b/reverse_engineering/node_modules/pg/package.json @@ -0,0 +1,89 @@ +{ + "_from": "pg", + "_id": "pg@8.7.1", + "_inBundle": false, + "_integrity": "sha512-7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==", + "_location": "/pg", + "_phantomChildren": {}, + "_requested": { + "type": "tag", + "registry": true, + "raw": "pg", + "name": "pg", + "escapedName": "pg", + "rawSpec": "", + "saveSpec": null, + "fetchSpec": "latest" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz", + "_shasum": "9ea9d1ec225980c36f94e181d009ab9f4ce4c471", + "_spec": "pg", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "Brian Carlson", + "email": "brian.m.carlson@gmail.com" + }, + "bugs": { + "url": "https://github.com/brianc/node-postgres/issues" + }, + "bundleDependencies": false, + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.4.1", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "deprecated": false, + "description": "PostgreSQL client - pure javascript & libpq with the same API", + "devDependencies": { + "async": "0.9.0", + "bluebird": "3.5.2", + "co": "4.6.0", + "pg-copy-streams": "0.3.0" + }, + "engines": { + "node": ">= 8.0.0" + }, + "files": [ + "lib", + "SPONSORS.md" + ], + "gitHead": "92b4d37926c276d343bfe56447ff6f526af757cf", + "homepage": "https://github.com/brianc/node-postgres", + "keywords": [ + "database", + "libpq", + "pg", + "postgre", + "postgres", + "postgresql", + "rdbms" + ], + "license": "MIT", + "main": "./lib", + "name": "pg", + "peerDependencies": { + "pg-native": ">=2.0.0" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg" + }, + "scripts": { + 
"test": "make test-all" + }, + "version": "8.7.1" +} diff --git a/reverse_engineering/node_modules/pgpass/README.md b/reverse_engineering/node_modules/pgpass/README.md new file mode 100644 index 0000000..bbc5193 --- /dev/null +++ b/reverse_engineering/node_modules/pgpass/README.md @@ -0,0 +1,74 @@ +# pgpass + +[![Build Status](https://github.com/hoegaarden/pgpass/workflows/CI/badge.svg?branch=master)](https://github.com/hoegaarden/pgpass/actions?query=workflow%3ACI+branch%3Amaster) + +## Install + +```sh +npm install pgpass +``` + +## Usage +```js +var pgPass = require('pgpass'); + +var connInfo = { + 'host' : 'pgserver' , + 'user' : 'the_user_name' , +}; + +pgPass(connInfo, function(pass){ + conn_info.password = pass; + // connect to postgresql server +}); +``` + +## Description + +This module tries to read the `~/.pgpass` file (or the equivalent for windows systems). If the environment variable `PGPASSFILE` is set, this file is used instead. If everything goes right, the password from said file is passed to the callback; if the password cannot be read `undefined` is passed to the callback. + +Cases where `undefined` is returned: + +- the environment variable `PGPASSWORD` is set +- the file cannot be read (wrong permissions, no such file, ...) +- for non windows systems: the file is write-/readable by the group or by other users +- there is no matching line for the given connection info + +There should be no need to use this module directly; it is already included in `node-postgres`. + +## Configuration + +The module reads the environment variable `PGPASS_NO_DEESCAPE` to decide if the the read tokens from the password file should be de-escaped or not. Default is to do de-escaping. For further information on this see [this commit](https://github.com/postgres/postgres/commit/8d15e3ec4fcb735875a8a70a09ec0c62153c3329). + + +## Tests + +There are tests in `./test/`; including linting and coverage testing. 
Running `npm test` runs: + +- `jshint` +- `mocha` tests +- `jscoverage` and `mocha -R html-cov` + +You can see the coverage report in `coverage.html`. + + +## Development, Patches, Bugs, ... + +If you find Bugs or have improvements, please feel free to open a issue on GitHub. If you provide a pull request, I'm more than happy to merge them, just make sure to add tests for your changes. + +## Links + +- https://github.com/hoegaarden/node-pgpass +- http://www.postgresql.org/docs/current/static/libpq-pgpass.html +- https://wiki.postgresql.org/wiki/Pgpass +- https://github.com/postgres/postgres/blob/master/src/interfaces/libpq/fe-connect.c + +## License + +Copyright (c) 2013-2016 Hannes Hörl + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/reverse_engineering/node_modules/pgpass/lib/helper.js b/reverse_engineering/node_modules/pgpass/lib/helper.js new file mode 100644 index 0000000..f988460 --- /dev/null +++ b/reverse_engineering/node_modules/pgpass/lib/helper.js @@ -0,0 +1,233 @@ +'use strict'; + +var path = require('path') + , Stream = require('stream').Stream + , split = require('split2') + , util = require('util') + , defaultPort = 5432 + , isWin = (process.platform === 'win32') + , warnStream = process.stderr +; + + +var S_IRWXG = 56 // 00070(8) + , S_IRWXO = 7 // 00007(8) + , S_IFMT = 61440 // 00170000(8) + , S_IFREG = 32768 // 0100000(8) +; +function isRegFile(mode) { + return ((mode & S_IFMT) == S_IFREG); +} + +var fieldNames = [ 'host', 'port', 'database', 'user', 'password' ]; +var nrOfFields = fieldNames.length; +var passKey = fieldNames[ nrOfFields -1 ]; + + +function warn() { + var isWritable = ( + warnStream instanceof Stream && + true === warnStream.writable + ); + + if (isWritable) { + var args = Array.prototype.slice.call(arguments).concat("\n"); + warnStream.write( util.format.apply(util, args) ); + } +} + + +Object.defineProperty(module.exports, 'isWin', { + get : function() { + return isWin; + } , + set : function(val) { + isWin = val; + } +}); + + +module.exports.warnTo = function(stream) { + var old = warnStream; + warnStream = stream; + return old; +}; + +module.exports.getFileName = function(rawEnv){ + var env = rawEnv || process.env; + var file = env.PGPASSFILE || ( + isWin ? + path.join( env.APPDATA || './' , 'postgresql', 'pgpass.conf' ) : + path.join( env.HOME || './', '.pgpass' ) + ); + return file; +}; + +module.exports.usePgPass = function(stats, fname) { + if (Object.prototype.hasOwnProperty.call(process.env, 'PGPASSWORD')) { + return false; + } + + if (isWin) { + return true; + } + + fname = fname || ''; + + if (! 
isRegFile(stats.mode)) { + warn('WARNING: password file "%s" is not a plain file', fname); + return false; + } + + if (stats.mode & (S_IRWXG | S_IRWXO)) { + /* If password file is insecure, alert the user and ignore it. */ + warn('WARNING: password file "%s" has group or world access; permissions should be u=rw (0600) or less', fname); + return false; + } + + return true; +}; + + +var matcher = module.exports.match = function(connInfo, entry) { + return fieldNames.slice(0, -1).reduce(function(prev, field, idx){ + if (idx == 1) { + // the port + if ( Number( connInfo[field] || defaultPort ) === Number( entry[field] ) ) { + return prev && true; + } + } + return prev && ( + entry[field] === '*' || + entry[field] === connInfo[field] + ); + }, true); +}; + + +module.exports.getPassword = function(connInfo, stream, cb) { + var pass; + var lineStream = stream.pipe(split()); + + function onLine(line) { + var entry = parseLine(line); + if (entry && isValidEntry(entry) && matcher(connInfo, entry)) { + pass = entry[passKey]; + lineStream.end(); // -> calls onEnd(), but pass is set now + } + } + + var onEnd = function() { + stream.destroy(); + cb(pass); + }; + + var onErr = function(err) { + stream.destroy(); + warn('WARNING: error on reading file: %s', err); + cb(undefined); + }; + + stream.on('error', onErr); + lineStream + .on('data', onLine) + .on('end', onEnd) + .on('error', onErr) + ; + +}; + + +var parseLine = module.exports.parseLine = function(line) { + if (line.length < 11 || line.match(/^\s+#/)) { + return null; + } + + var curChar = ''; + var prevChar = ''; + var fieldIdx = 0; + var startIdx = 0; + var endIdx = 0; + var obj = {}; + var isLastField = false; + var addToObj = function(idx, i0, i1) { + var field = line.substring(i0, i1); + + if (! 
Object.hasOwnProperty.call(process.env, 'PGPASS_NO_DEESCAPE')) { + field = field.replace(/\\([:\\])/g, '$1'); + } + + obj[ fieldNames[idx] ] = field; + }; + + for (var i = 0 ; i < line.length-1 ; i += 1) { + curChar = line.charAt(i+1); + prevChar = line.charAt(i); + + isLastField = (fieldIdx == nrOfFields-1); + + if (isLastField) { + addToObj(fieldIdx, startIdx); + break; + } + + if (i >= 0 && curChar == ':' && prevChar !== '\\') { + addToObj(fieldIdx, startIdx, i+1); + + startIdx = i+2; + fieldIdx += 1; + } + } + + obj = ( Object.keys(obj).length === nrOfFields ) ? obj : null; + + return obj; +}; + + +var isValidEntry = module.exports.isValidEntry = function(entry){ + var rules = { + // host + 0 : function(x){ + return x.length > 0; + } , + // port + 1 : function(x){ + if (x === '*') { + return true; + } + x = Number(x); + return ( + isFinite(x) && + x > 0 && + x < 9007199254740992 && + Math.floor(x) === x + ); + } , + // database + 2 : function(x){ + return x.length > 0; + } , + // username + 3 : function(x){ + return x.length > 0; + } , + // password + 4 : function(x){ + return x.length > 0; + } + }; + + for (var idx = 0 ; idx < fieldNames.length ; idx += 1) { + var rule = rules[idx]; + var value = entry[ fieldNames[idx] ] || ''; + + var res = rule(value); + if (!res) { + return false; + } + } + + return true; +}; + diff --git a/reverse_engineering/node_modules/pgpass/lib/index.js b/reverse_engineering/node_modules/pgpass/lib/index.js new file mode 100644 index 0000000..ecfcf30 --- /dev/null +++ b/reverse_engineering/node_modules/pgpass/lib/index.js @@ -0,0 +1,23 @@ +'use strict'; + +var path = require('path') + , fs = require('fs') + , helper = require('./helper.js') +; + + +module.exports = function(connInfo, cb) { + var file = helper.getFileName(); + + fs.stat(file, function(err, stat){ + if (err || !helper.usePgPass(stat, file)) { + return cb(undefined); + } + + var st = fs.createReadStream(file); + + helper.getPassword(connInfo, st, cb); + }); +}; + 
+module.exports.warnTo = helper.warnTo; diff --git a/reverse_engineering/node_modules/pgpass/package.json b/reverse_engineering/node_modules/pgpass/package.json new file mode 100644 index 0000000..d503bd0 --- /dev/null +++ b/reverse_engineering/node_modules/pgpass/package.json @@ -0,0 +1,72 @@ +{ + "_from": "pgpass@1.x", + "_id": "pgpass@1.0.4", + "_inBundle": false, + "_integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==", + "_location": "/pgpass", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pgpass@1.x", + "name": "pgpass", + "escapedName": "pgpass", + "rawSpec": "1.x", + "saveSpec": null, + "fetchSpec": "1.x" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz", + "_shasum": "85eb93a83800b20f8057a2b029bf05abaf94ea9c", + "_spec": "pgpass@1.x", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", + "author": { + "name": "Hannes Hörl", + "email": "hannes.hoerl+pgpass@snowreporter.com" + }, + "bugs": { + "url": "https://github.com/hoegaarden/pgpass/issues" + }, + "bundleDependencies": false, + "dependencies": { + "split2": "^3.1.1" + }, + "deprecated": false, + "description": "Module for reading .pgpass", + "devDependencies": { + "jshint": "^2.12.0", + "mocha": "^8.2.0", + "nyc": "^15.1.0", + "pg": "^8.4.1", + "pg-escape": "^0.2.0", + "pg-native": "3.0.0", + "resumer": "0.0.0", + "tmp": "^0.2.1", + "which": "^2.0.2" + }, + "homepage": "https://github.com/hoegaarden/pgpass#readme", + "keywords": [ + "postgres", + "pg", + "pgpass", + "password", + "postgresql" + ], + "license": "MIT", + "main": "lib/index", + "name": "pgpass", + "repository": { + "type": "git", + "url": "git+https://github.com/hoegaarden/pgpass.git" + }, + "scripts": { + "_covered_test": "nyc --reporter html --reporter text \"$npm_execpath\" run _test", + "_hint": "jshint --exclude node_modules --verbose 
lib test", + "_test": "mocha --recursive -R list", + "pretest": "chmod 600 ./test/_pgpass", + "test": "\"$npm_execpath\" run _hint && \"$npm_execpath\" run _covered_test" + }, + "version": "1.0.4" +} diff --git a/reverse_engineering/node_modules/postgres-array/index.d.ts b/reverse_engineering/node_modules/postgres-array/index.d.ts new file mode 100644 index 0000000..88665bd --- /dev/null +++ b/reverse_engineering/node_modules/postgres-array/index.d.ts @@ -0,0 +1,4 @@ + +export function parse(source: string): string[]; +export function parse(source: string, transform: (value: string) => T): T[]; + diff --git a/reverse_engineering/node_modules/postgres-array/index.js b/reverse_engineering/node_modules/postgres-array/index.js new file mode 100644 index 0000000..18bfd16 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-array/index.js @@ -0,0 +1,97 @@ +'use strict' + +exports.parse = function (source, transform) { + return new ArrayParser(source, transform).parse() +} + +class ArrayParser { + constructor (source, transform) { + this.source = source + this.transform = transform || identity + this.position = 0 + this.entries = [] + this.recorded = [] + this.dimension = 0 + } + + isEof () { + return this.position >= this.source.length + } + + nextCharacter () { + var character = this.source[this.position++] + if (character === '\\') { + return { + value: this.source[this.position++], + escaped: true + } + } + return { + value: character, + escaped: false + } + } + + record (character) { + this.recorded.push(character) + } + + newEntry (includeEmpty) { + var entry + if (this.recorded.length > 0 || includeEmpty) { + entry = this.recorded.join('') + if (entry === 'NULL' && !includeEmpty) { + entry = null + } + if (entry !== null) entry = this.transform(entry) + this.entries.push(entry) + this.recorded = [] + } + } + + consumeDimensions () { + if (this.source[0] === '[') { + while (!this.isEof()) { + var char = this.nextCharacter() + if (char.value === '=') break + 
} + } + } + + parse (nested) { + var character, parser, quote + this.consumeDimensions() + while (!this.isEof()) { + character = this.nextCharacter() + if (character.value === '{' && !quote) { + this.dimension++ + if (this.dimension > 1) { + parser = new ArrayParser(this.source.substr(this.position - 1), this.transform) + this.entries.push(parser.parse(true)) + this.position += parser.position - 2 + } + } else if (character.value === '}' && !quote) { + this.dimension-- + if (!this.dimension) { + this.newEntry() + if (nested) return this.entries + } + } else if (character.value === '"' && !character.escaped) { + if (quote) this.newEntry(true) + quote = !quote + } else if (character.value === ',' && !quote) { + this.newEntry() + } else { + this.record(character.value) + } + } + if (this.dimension !== 0) { + throw new Error('array dimension not balanced') + } + return this.entries + } +} + +function identity (value) { + return value +} diff --git a/reverse_engineering/node_modules/postgres-array/license b/reverse_engineering/node_modules/postgres-array/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-array/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/reverse_engineering/node_modules/postgres-array/package.json b/reverse_engineering/node_modules/postgres-array/package.json new file mode 100644 index 0000000..3d72116 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-array/package.json @@ -0,0 +1,67 @@ +{ + "_from": "postgres-array@~2.0.0", + "_id": "postgres-array@2.0.0", + "_inBundle": false, + "_integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "_location": "/postgres-array", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-array@~2.0.0", + "name": "postgres-array", + "escapedName": "postgres-array", + "rawSpec": "~2.0.0", + "saveSpec": null, + "fetchSpec": "~2.0.0" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "_shasum": "48f8fce054fbc69671999329b8834b772652d82e", + "_spec": "postgres-array@~2.0.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-array/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Parse postgres array columns", + "devDependencies": { + "standard": "^12.0.1", + "tape": "^4.0.0" + }, + "engines": { + "node": ">=4" + }, + "files": [ + 
"index.js", + "index.d.ts", + "readme.md" + ], + "homepage": "https://github.com/bendrucker/postgres-array#readme", + "keywords": [ + "postgres", + "array", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-array", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-array.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "types": "index.d.ts", + "version": "2.0.0" +} diff --git a/reverse_engineering/node_modules/postgres-array/readme.md b/reverse_engineering/node_modules/postgres-array/readme.md new file mode 100644 index 0000000..b74b369 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-array/readme.md @@ -0,0 +1,43 @@ +# postgres-array [![Build Status](https://travis-ci.org/bendrucker/postgres-array.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-array) + +> Parse postgres array columns + + +## Install + +``` +$ npm install --save postgres-array +``` + + +## Usage + +```js +var postgresArray = require('postgres-array') + +postgresArray.parse('{1,2,3}', (value) => parseInt(value, 10)) +//=> [1, 2, 3] +``` + +## API + +#### `parse(input, [transform])` -> `array` + +##### input + +*Required* +Type: `string` + +A Postgres array string. + +##### transform + +Type: `function` +Default: `identity` + +A function that transforms non-null values inserted into the array. 
+ + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/reverse_engineering/node_modules/postgres-bytea/index.js b/reverse_engineering/node_modules/postgres-bytea/index.js new file mode 100644 index 0000000..d1107a0 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-bytea/index.js @@ -0,0 +1,31 @@ +'use strict' + +module.exports = function parseBytea (input) { + if (/^\\x/.test(input)) { + // new 'hex' style response (pg >9.0) + return new Buffer(input.substr(2), 'hex') + } + var output = '' + var i = 0 + while (i < input.length) { + if (input[i] !== '\\') { + output += input[i] + ++i + } else { + if (/[0-7]{3}/.test(input.substr(i + 1, 3))) { + output += String.fromCharCode(parseInt(input.substr(i + 1, 3), 8)) + i += 4 + } else { + var backslashes = 1 + while (i + backslashes < input.length && input[i + backslashes] === '\\') { + backslashes++ + } + for (var k = 0; k < Math.floor(backslashes / 2); ++k) { + output += '\\' + } + i += Math.floor(backslashes / 2) * 2 + } + } + } + return new Buffer(output, 'binary') +} diff --git a/reverse_engineering/node_modules/postgres-bytea/license b/reverse_engineering/node_modules/postgres-bytea/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-bytea/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/reverse_engineering/node_modules/postgres-bytea/package.json b/reverse_engineering/node_modules/postgres-bytea/package.json new file mode 100644 index 0000000..5a00b5d --- /dev/null +++ b/reverse_engineering/node_modules/postgres-bytea/package.json @@ -0,0 +1,66 @@ +{ + "_from": "postgres-bytea@~1.0.0", + "_id": "postgres-bytea@1.0.0", + "_inBundle": false, + "_integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=", + "_location": "/postgres-bytea", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-bytea@~1.0.0", + "name": "postgres-bytea", + "escapedName": "postgres-bytea", + "rawSpec": "~1.0.0", + "saveSpec": null, + "fetchSpec": "~1.0.0" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "_shasum": "027b533c0aa890e26d172d47cf9ccecc521acd35", + "_spec": "postgres-bytea@~1.0.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-bytea/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Postgres bytea parser", + "devDependencies": { + "standard": "^4.0.0", + "tape": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js", + "readme.md" + ], + "homepage": 
"https://github.com/bendrucker/postgres-bytea#readme", + "keywords": [ + "bytea", + "postgres", + "binary", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-bytea", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-bytea.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "1.0.0" +} diff --git a/reverse_engineering/node_modules/postgres-bytea/readme.md b/reverse_engineering/node_modules/postgres-bytea/readme.md new file mode 100644 index 0000000..4939c3b --- /dev/null +++ b/reverse_engineering/node_modules/postgres-bytea/readme.md @@ -0,0 +1,34 @@ +# postgres-bytea [![Build Status](https://travis-ci.org/bendrucker/postgres-bytea.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-bytea) + +> Postgres bytea parser + + +## Install + +``` +$ npm install --save postgres-bytea +``` + + +## Usage + +```js +var bytea = require('postgres-bytea'); +bytea('\\000\\100\\200') +//=> buffer +``` + +## API + +#### `bytea(input)` -> `buffer` + +##### input + +*Required* +Type: `string` + +A Postgres bytea binary string. 
+ +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/reverse_engineering/node_modules/postgres-date/index.js b/reverse_engineering/node_modules/postgres-date/index.js new file mode 100644 index 0000000..5dc73fb --- /dev/null +++ b/reverse_engineering/node_modules/postgres-date/index.js @@ -0,0 +1,116 @@ +'use strict' + +var DATE_TIME = /(\d{1,})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?.*?( BC)?$/ +var DATE = /^(\d{1,})-(\d{2})-(\d{2})( BC)?$/ +var TIME_ZONE = /([Z+-])(\d{2})?:?(\d{2})?:?(\d{2})?/ +var INFINITY = /^-?infinity$/ + +module.exports = function parseDate (isoDate) { + if (INFINITY.test(isoDate)) { + // Capitalize to Infinity before passing to Number + return Number(isoDate.replace('i', 'I')) + } + var matches = DATE_TIME.exec(isoDate) + + if (!matches) { + // Force YYYY-MM-DD dates to be parsed as local time + return getDate(isoDate) || null + } + + var isBC = !!matches[8] + var year = parseInt(matches[1], 10) + if (isBC) { + year = bcYearToNegativeYear(year) + } + + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + var hour = parseInt(matches[4], 10) + var minute = parseInt(matches[5], 10) + var second = parseInt(matches[6], 10) + + var ms = matches[7] + ms = ms ? 
1000 * parseFloat(ms) : 0 + + var date + var offset = timeZoneOffset(isoDate) + if (offset != null) { + date = new Date(Date.UTC(year, month, day, hour, minute, second, ms)) + + // Account for years from 0 to 99 being interpreted as 1900-1999 + // by Date.UTC / the multi-argument form of the Date constructor + if (is0To99(year)) { + date.setUTCFullYear(year) + } + + if (offset !== 0) { + date.setTime(date.getTime() - offset) + } + } else { + date = new Date(year, month, day, hour, minute, second, ms) + + if (is0To99(year)) { + date.setFullYear(year) + } + } + + return date +} + +function getDate (isoDate) { + var matches = DATE.exec(isoDate) + if (!matches) { + return + } + + var year = parseInt(matches[1], 10) + var isBC = !!matches[4] + if (isBC) { + year = bcYearToNegativeYear(year) + } + + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + // YYYY-MM-DD will be parsed as local time + var date = new Date(year, month, day) + + if (is0To99(year)) { + date.setFullYear(year) + } + + return date +} + +// match timezones: +// Z (UTC) +// -05 +// +06:30 +function timeZoneOffset (isoDate) { + if (isoDate.endsWith('+00')) { + return 0 + } + + var zone = TIME_ZONE.exec(isoDate.split(' ')[1]) + if (!zone) return + var type = zone[1] + + if (type === 'Z') { + return 0 + } + var sign = type === '-' ? 
-1 : 1 + var offset = parseInt(zone[2], 10) * 3600 + + parseInt(zone[3] || 0, 10) * 60 + + parseInt(zone[4] || 0, 10) + + return offset * sign * 1000 +} + +function bcYearToNegativeYear (year) { + // Account for numerical difference between representations of BC years + // See: https://github.com/bendrucker/postgres-date/issues/5 + return -(year - 1) +} + +function is0To99 (num) { + return num >= 0 && num < 100 +} diff --git a/reverse_engineering/node_modules/postgres-date/license b/reverse_engineering/node_modules/postgres-date/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-date/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/reverse_engineering/node_modules/postgres-date/package.json b/reverse_engineering/node_modules/postgres-date/package.json new file mode 100644 index 0000000..072f85c --- /dev/null +++ b/reverse_engineering/node_modules/postgres-date/package.json @@ -0,0 +1,65 @@ +{ + "_from": "postgres-date@~1.0.4", + "_id": "postgres-date@1.0.7", + "_inBundle": false, + "_integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "_location": "/postgres-date", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-date@~1.0.4", + "name": "postgres-date", + "escapedName": "postgres-date", + "rawSpec": "~1.0.4", + "saveSpec": null, + "fetchSpec": "~1.0.4" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "_shasum": "51bc086006005e5061c591cee727f2531bf641a8", + "_spec": "postgres-date@~1.0.4", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-date/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Postgres date column parser", + "devDependencies": { + "standard": "^14.0.0", + "tape": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js", + "readme.md" + ], + "homepage": "https://github.com/bendrucker/postgres-date#readme", + "keywords": [ + "postgres", + "date", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-date", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-date.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "1.0.7" +} diff --git a/reverse_engineering/node_modules/postgres-date/readme.md 
b/reverse_engineering/node_modules/postgres-date/readme.md new file mode 100644 index 0000000..095431a --- /dev/null +++ b/reverse_engineering/node_modules/postgres-date/readme.md @@ -0,0 +1,49 @@ +# postgres-date [![Build Status](https://travis-ci.org/bendrucker/postgres-date.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-date) [![Greenkeeper badge](https://badges.greenkeeper.io/bendrucker/postgres-date.svg)](https://greenkeeper.io/) + +> Postgres date output parser + +This package parses [date/time outputs](https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME-OUTPUT) from Postgres into Javascript `Date` objects. Its goal is to match Postgres behavior and preserve data accuracy. + +If you find a case where a valid Postgres output results in incorrect parsing (including loss of precision), please [create a pull request](https://github.com/bendrucker/postgres-date/compare) and provide a failing test. + +**Supported Postgres Versions:** `>= 9.6` + +All prior versions of Postgres are likely compatible but not officially supported. + +## Install + +``` +$ npm install --save postgres-date +``` + + +## Usage + +```js +var parse = require('postgres-date') +parse('2011-01-23 22:15:51Z') +// => 2011-01-23T22:15:51.000Z +``` + +## API + +#### `parse(isoDate)` -> `date` + +##### isoDate + +*Required* +Type: `string` + +A date string from Postgres. 
+ +## Releases + +The following semantic versioning increments will be used for changes: + +* **Major**: Removal of support for Node.js versions or Postgres versions (not expected) +* **Minor**: Unused, since Postgres returns dates in standard ISO 8601 format +* **Patch**: Any fix for parsing behavior + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/reverse_engineering/node_modules/postgres-interval/index.d.ts b/reverse_engineering/node_modules/postgres-interval/index.d.ts new file mode 100644 index 0000000..f82b4c3 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-interval/index.d.ts @@ -0,0 +1,20 @@ +declare namespace PostgresInterval { + export interface IPostgresInterval { + years?: number; + months?: number; + days?: number; + hours?: number; + minutes?: number; + seconds?: number; + milliseconds?: number; + + toPostgres(): string; + + toISO(): string; + toISOString(): string; + } +} + +declare function PostgresInterval(raw: string): PostgresInterval.IPostgresInterval; + +export = PostgresInterval; diff --git a/reverse_engineering/node_modules/postgres-interval/index.js b/reverse_engineering/node_modules/postgres-interval/index.js new file mode 100644 index 0000000..8ecca80 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-interval/index.js @@ -0,0 +1,125 @@ +'use strict' + +var extend = require('xtend/mutable') + +module.exports = PostgresInterval + +function PostgresInterval (raw) { + if (!(this instanceof PostgresInterval)) { + return new PostgresInterval(raw) + } + extend(this, parse(raw)) +} +var properties = ['seconds', 'minutes', 'hours', 'days', 'months', 'years'] +PostgresInterval.prototype.toPostgres = function () { + var filtered = properties.filter(this.hasOwnProperty, this) + + // In addition to `properties`, we need to account for fractions of seconds. 
+ if (this.milliseconds && filtered.indexOf('seconds') < 0) { + filtered.push('seconds') + } + + if (filtered.length === 0) return '0' + return filtered + .map(function (property) { + var value = this[property] || 0 + + // Account for fractional part of seconds, + // remove trailing zeroes. + if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/\.?0+$/, '') + } + + return value + ' ' + property + }, this) + .join(' ') +} + +var propertiesISOEquivalent = { + years: 'Y', + months: 'M', + days: 'D', + hours: 'H', + minutes: 'M', + seconds: 'S' +} +var dateProperties = ['years', 'months', 'days'] +var timeProperties = ['hours', 'minutes', 'seconds'] +// according to ISO 8601 +PostgresInterval.prototype.toISOString = PostgresInterval.prototype.toISO = function () { + var datePart = dateProperties + .map(buildProperty, this) + .join('') + + var timePart = timeProperties + .map(buildProperty, this) + .join('') + + return 'P' + datePart + 'T' + timePart + + function buildProperty (property) { + var value = this[property] || 0 + + // Account for fractional part of seconds, + // remove trailing zeroes. + if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/0+$/, '') + } + + return value + propertiesISOEquivalent[property] + } +} + +var NUMBER = '([+-]?\\d+)' +var YEAR = NUMBER + '\\s+years?' +var MONTH = NUMBER + '\\s+mons?' +var DAY = NUMBER + '\\s+days?' +var TIME = '([+-])?([\\d]*):(\\d\\d):(\\d\\d)\\.?(\\d{1,6})?' +var INTERVAL = new RegExp([YEAR, MONTH, DAY, TIME].map(function (regexString) { + return '(' + regexString + ')?' 
+}) + .join('\\s*')) + +// Positions of values in regex match +var positions = { + years: 2, + months: 4, + days: 6, + hours: 9, + minutes: 10, + seconds: 11, + milliseconds: 12 +} +// We can use negative time +var negatives = ['hours', 'minutes', 'seconds', 'milliseconds'] + +function parseMilliseconds (fraction) { + // add omitted zeroes + var microseconds = fraction + '000000'.slice(fraction.length) + return parseInt(microseconds, 10) / 1000 +} + +function parse (interval) { + if (!interval) return {} + var matches = INTERVAL.exec(interval) + var isNegative = matches[8] === '-' + return Object.keys(positions) + .reduce(function (parsed, property) { + var position = positions[property] + var value = matches[position] + // no empty string + if (!value) return parsed + // milliseconds are actually microseconds (up to 6 digits) + // with omitted trailing zeroes. + value = property === 'milliseconds' + ? parseMilliseconds(value) + : parseInt(value, 10) + // no zeros + if (!value) return parsed + if (isNegative && ~negatives.indexOf(property)) { + value *= -1 + } + parsed[property] = value + return parsed + }, {}) +} diff --git a/reverse_engineering/node_modules/postgres-interval/license b/reverse_engineering/node_modules/postgres-interval/license new file mode 100644 index 0000000..25c6247 --- /dev/null +++ b/reverse_engineering/node_modules/postgres-interval/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all 
copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/reverse_engineering/node_modules/postgres-interval/package.json b/reverse_engineering/node_modules/postgres-interval/package.json new file mode 100644 index 0000000..50acc4b --- /dev/null +++ b/reverse_engineering/node_modules/postgres-interval/package.json @@ -0,0 +1,68 @@ +{ + "_from": "postgres-interval@^1.1.0", + "_id": "postgres-interval@1.2.0", + "_inBundle": false, + "_integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "_location": "/postgres-interval", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-interval@^1.1.0", + "name": "postgres-interval", + "escapedName": "postgres-interval", + "rawSpec": "^1.1.0", + "saveSpec": null, + "fetchSpec": "^1.1.0" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "_shasum": "b460c82cb1587507788819a06aa0fffdb3544695", + "_spec": "postgres-interval@^1.1.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-interval/issues" + }, + "bundleDependencies": false, + "dependencies": { + "xtend": "^4.0.0" + }, + "deprecated": false, + "description": "Parse Postgres interval columns", + 
"devDependencies": { + "standard": "^12.0.1", + "tape": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js", + "index.d.ts", + "readme.md" + ], + "homepage": "https://github.com/bendrucker/postgres-interval#readme", + "keywords": [ + "postgres", + "interval", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-interval", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-interval.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "1.2.0" +} diff --git a/reverse_engineering/node_modules/postgres-interval/readme.md b/reverse_engineering/node_modules/postgres-interval/readme.md new file mode 100644 index 0000000..53cda4a --- /dev/null +++ b/reverse_engineering/node_modules/postgres-interval/readme.md @@ -0,0 +1,48 @@ +# postgres-interval [![Build Status](https://travis-ci.org/bendrucker/postgres-interval.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-interval) [![Greenkeeper badge](https://badges.greenkeeper.io/bendrucker/postgres-interval.svg)](https://greenkeeper.io/) + +> Parse Postgres interval columns + + +## Install + +``` +$ npm install --save postgres-interval +``` + + +## Usage + +```js +var parse = require('postgres-interval') +var interval = parse('01:02:03') +//=> {hours: 1, minutes: 2, seconds: 3} +interval.toPostgres() +// 3 seconds 2 minutes 1 hours +interval.toISO() +// P0Y0M0DT1H2M3S +``` + +## API + +#### `parse(pgInterval)` -> `interval` + +##### pgInterval + +*Required* +Type: `string` + +A Postgres interval string. + +#### `interval.toPostgres()` -> `string` + +Returns an interval string. This allows the interval object to be passed into prepared statements. + +#### `interval.toISOString()` -> `string` + +Returns an [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) compliant string. + +Also available as `interval.toISO()` for backwards compatibility. 
+ +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/reverse_engineering/node_modules/readable-stream/CONTRIBUTING.md b/reverse_engineering/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. 
+ +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/reverse_engineering/node_modules/readable-stream/GOVERNANCE.md b/reverse_engineering/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. 
In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. 
If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. 
+ +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/reverse_engineering/node_modules/readable-stream/LICENSE b/reverse_engineering/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/reverse_engineering/node_modules/readable-stream/README.md b/reverse_engineering/node_modules/readable-stream/README.md new file mode 100644 index 0000000..6f035ab --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. 
+ +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.19.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). 
This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. 
+ + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/reverse_engineering/node_modules/readable-stream/errors-browser.js b/reverse_engineering/node_modules/readable-stream/errors-browser.js new file mode 100644 index 0000000..fb8e73e --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) 
{ + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 
'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/reverse_engineering/node_modules/readable-stream/errors.js b/reverse_engineering/node_modules/readable-stream/errors.js new file mode 100644 index 0000000..8471526 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// 
https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} 
${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/reverse_engineering/node_modules/readable-stream/experimentalWarning.js b/reverse_engineering/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 0000000..78e8414 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? 
emitExperimentalWarning + : noop; diff --git a/reverse_engineering/node_modules/readable-stream/lib/_stream_duplex.js b/reverse_engineering/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..6752519 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,139 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. 
+'use strict'; +/**/ + +var objectKeys = Object.keys || function (obj) { + var keys = []; + + for (var key in obj) { + keys.push(key); + } + + return keys; +}; +/**/ + + +module.exports = Duplex; + +var Readable = require('./_stream_readable'); + +var Writable = require('./_stream_writable'); + +require('inherits')(Duplex, Readable); + +{ + // Allow the keys array to be GC'ed. + var keys = objectKeys(Writable.prototype); + + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); // the no-half-open enforcer + +function onend() { + // If the writable side 
ended, then we're ok. + if (this._writableState.ended) return; // no more data can be written. + // But allow more writes to happen in this tick. + + process.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/_stream_passthrough.js b/reverse_engineering/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..32e7414 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,39 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +require('inherits')(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/_stream_readable.js b/reverse_engineering/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..192d451 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1124 @@ +// Copyright Joyent, Inc. 
and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+'use strict'; + +module.exports = Readable; +/**/ + +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; +/**/ + +var EE = require('events').EventEmitter; + +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ + + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +/**/ + + +var debugUtil = require('util'); + +var debug; + +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + + +var BufferList = require('./internal/streams/buffer_list'); + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. + + +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; + +require('inherits')(Readable, Stream); + +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. 
+ if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + + this.sync = true; // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; // Should close be emitted on destroy. Defaults to true. + + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') + + this.autoDestroy = !!options.autoDestroy; // has it been destroyed + + this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ + this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s + + this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled + + this.readingMore = false; + this.decoder = null; + this.encoding = null; + + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); // legacy + + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; + +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; // Manually shove something into the read() buffer. 
+// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. + + +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; // Unshift should *always* be something directly out of read() + + +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + 
state.reading = false; + maybeReadMore(stream, state); + } + } // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + + + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + + return er; +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; // backwards compatibility. 
+ + +Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 + + this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: + + var p = this._readableState.buffer.head; + var content = ''; + + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + + this._readableState.buffer.clear(); + + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; // Don't raise the hwm > 1GB + + +var MAX_HWM = 0x40000000; + +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + + return n; +} // This function is designed to be inlinable, so please take care when making +// changes to the function body. + + +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } // If we're asking for more than the current hwm, then raise the hwm. + + + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; // Don't have enough + + if (!state.ended) { + state.needReadable = true; + return 0; + } + + return state.length; +} // you can override either this method, or the async _read(n) below. 
+ + +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. + + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + // if we need a readable event, then we need to do some reading. 
+ + + var doRead = state.needReadable; + debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some + + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + + + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; // if the length is currently zero, then we *need* a readable event. + + if (state.length === 0) state.needReadable = true; // call internal read method + + this._read(state.highWaterMark); + + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. + + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + return ret; +}; + +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + + if (state.decoder) { + var chunk = state.decoder.end(); + + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + + state.ended = true; + + if (state.sync) { + // if we are sync, wait until next tick to emit the data. 
+ // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} // Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. + + +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} + +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + + + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} // at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. 
+ + +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) // didn't get any data, stop spinning. + break; + } + + state.readingMore = false; +} // abstract method. to be overridden in specific implementation classes. 
+// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. + + +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + + case 1: + state.pipes = [state.pipes, dest]; + break; + + default: + state.pipes.push(dest); + break; + } + + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. 
+ + + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + + function cleanup() { + debug('cleanup'); // cleanup event handlers once the pipe is broken + + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + src.on('data', ondata); + + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + + src.pause(); + } + } // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + + + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } // Make sure our error handler is attached before userland ones. + + + prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. 
+ + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + + dest.once('close', onclose); + + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } // tell the dest that it's being piped to + + + dest.emit('pipe', src); // start the flow if it hasn't been started already. + + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; // if we're not piping anywhere, then do nothing. + + if (state.pipesCount === 0) return this; // just one destination. most common case. + + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; // got a match. + + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } // slow case. multiple pipe destinations. + + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + } + + return this; + } // try to find the right one. 
+ + + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; // set up data events if they are asked for +// Ensure readable listeners eventually get something + + +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused + + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + + return res; +}; + +Readable.prototype.addListener = Readable.prototype.on; + +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. 
+ process.nextTick(updateReadableListening, this); + } + + return res; +}; + +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + + return res; +}; + +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} // pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+ + +Readable.prototype.resume = function () { + var state = this._readableState; + + if (!state.flowing) { + debug('resume'); // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + + state.flowing = !state.readableListening; + resume(this, state); + } + + state.paused = false; + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + debug('resume', state.reading); + + if (!state.reading) { + stream.read(0); + } + + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + + this._readableState.paused = true; + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + + while (state.flowing && stream.read() !== null) { + ; + } +} // wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. 
+ + +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode + + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + + if (!ret) { + paused = true; + stream.pause(); + } + }); // proxy all the other methods. + // important when wrapping filters and duplexes. + + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } // proxy certain important events. + + + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } // when we try to consume some more bytes, simply unpause the + // underlying stream. 
+ + + this._read = function (n) { + debug('wrapped _read', n); + + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + + return createReadableStreamAsyncIterator(this); + }; +} + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); // exposed for testing purposes only. + +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); // Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. 
+// This function is designed to be inlinable, so please take care when making +// changes to the function body. + +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. + + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} + +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + + return from(Readable, iterable, opts); + }; +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + + return -1; +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/_stream_transform.js b/reverse_engineering/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..41a738c --- /dev/null +++ 
b/reverse_engineering/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,201 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. 
When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. 
+'use strict'; + +module.exports = Transform; + +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; + +var Duplex = require('./_stream_duplex'); + +require('inherits')(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + + ts.writechunk = null; + ts.writecb = null; + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; // start out asking for a readable event once data is transformed. + + this._readableState.needReadable = true; // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } // When the writable side finishes, then flush out anything remaining. 
+ + + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; // This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. + + +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; // Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. + + +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. 
+ ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/_stream_writable.js b/reverse_engineering/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..a2634d7 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,697 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. +'use strict'; + +module.exports = Writable; +/* */ + +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} // It seems a linked list but it is not +// there will be only 2 of these for each stream + + +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ + + +var Duplex; +/**/ + +Writable.WritableState = WritableState; +/**/ + +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ + +var Stream = require('./internal/streams/stream'); +/**/ + + +var Buffer = require('buffer').Buffer; + +var OurUint8Array = global.Uint8Array || function () {}; + +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} + +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +var destroyImpl = require('./internal/streams/destroy'); + +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; + +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, 
+ ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; + +var errorOrDestroy = destroyImpl.errorOrDestroy; + +require('inherits')(Writable, Stream); + +function nop() {} + +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream + // contains buffers or objects. + + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called + + this.finalCalled = false; // drain event flag. + + this.needDrain = false; // at the start of calling end() + + this.ending = false; // when end() has been called, and returned + + this.ended = false; // when 'finish' is emitted + + this.finished = false; // has it been destroyed + + this.destroyed = false; // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. 
+ // Everything else in the universe uses 'utf8', though. + + this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + + this.length = 0; // a flag to see when we're in the middle of a write. + + this.writing = false; // when true all writes will be buffered until .uncork() call + + this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + + this.sync = true; // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + + this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) + + this.onwrite = function (er) { + onwrite(stream, er); + }; // the callback that the user supplies to write(chunk,encoding,cb) + + + this.writecb = null; // the amount that is being written when _write is called. + + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + + this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + + this.prefinished = false; // True if the error was already emitted and should not be thrown again + + this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. 
+ + this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') + + this.autoDestroy = !!options.autoDestroy; // count buffered requests + + this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + + while (current) { + out.push(current); + current = current.next; + } + + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); // Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. + + +var realHasInstance; + +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. 
+ // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); // legacy. + + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} // Otherwise people can pipe Writable streams, which is just wrong. + + +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; + +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb + + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} // Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
+ + +function validChunk(stream, state, chunk, cb) { + var er; + + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + + return true; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; + +Writable.prototype.cork = function () { + this._writableState.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); // if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. + +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. 
+ + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); // this can emit finish, and it will always happen + // after error + + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); // this can emit finish, but finish must + // always follow error + + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually 
ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} // Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. + + +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} // if there's something in the buffer waiting, then process it + + +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + + state.pendingcb++; + state.lastBufferedRequest = null; + + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; 
+ var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks + + if (state.corked) { + state.corked = 1; + this.uncork(); + } // ignore unnecessary end() calls. 
+ + + if (!state.ending) endWritable(this, state, cb); + return this; +}; + +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} + +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + + if (err) { + errorOrDestroy(stream, err); + } + + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} + +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + + if (need) { + prefinish(stream, state); + + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + + while (entry) { + var cb = entry.callback; + state.pendingcb--; + 
cb(err); + entry = entry.next; + } // reuse the free corkReq. + + + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } // backward compatibility, the user is explicitly + // managing destroyed + + + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; + +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 0000000..9fb615a --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,207 @@ +'use strict'; + +var _Object$setPrototypeO; + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var finished = require('./end-of-stream'); + +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); + +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} + +function readAndResolve(iter) 
{ + var resolve = iter[kLastResolve]; + + if (resolve !== null) { + var data = iter[kStream].read(); // we defer if data is null + // we can be expecting either 'end' or + // 'error' + + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} + +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} + +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} + +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + + next: function next() { + var _this = this; + + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + + if (error !== null) { + return Promise.reject(error); + } + + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + + + var lastPromise = this[kLastPromise]; + var promise; + + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + + promise = new Promise(this[kHandlePromise]); + } + + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); + +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + 
writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise + // returned by next() and store the error + + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + + iterator[kError] = err; + return; + } + + var resolve = iterator[kLastResolve]; + + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; + +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 0000000..cdea425 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,210 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function 
(sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } + +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); return Constructor; } + +var _require = require('buffer'), + Buffer = _require.Buffer; + +var _require2 = require('util'), + inspect = _require2.inspect; + +var custom = inspect && inspect.custom || 'inspect'; + +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} + +module.exports = +/*#__PURE__*/ +function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + 
this.tail = null; + this.length = 0; + } + + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + + while (p = p.next) { + ret += s + p.data; + } + + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + + return ret; + } // Consumes a specified amount of bytes or characters from the buffered data. + + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? 
this._getString(n) : this._getBuffer(n); + } + + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } // Consumes a specified amount of characters from the buffered data. + + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Consumes a specified amount of bytes from the buffered data. + + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + + break; + } + + ++c; + } + + this.length -= c; + return ret; + } // Make sure the linked list only shows the minimal necessary information. + + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread({}, options, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } + }]); + + return BufferList; +}(); \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/destroy.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..3268a16 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,105 @@ +'use strict'; // undocumented cb() API, needed for core, not for public API + +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + + return this; + } // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + + if (this._readableState) { + this._readableState.destroyed = true; + } // if this is a duplex stream mark the writable part as destroyed as well + + + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + + return this; +} + +function emitErrorAndCloseNT(self, err) { + 
emitErrorNT(self, err); + emitCloseNT(self); +} + +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 0000000..831f286 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,104 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). 
+'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + + callback.apply(this, args); + }; +} + +function noop() {} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + + var writableEnded = stream._writableState && stream._writableState.finished; + + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + + var readableEnded = stream._readableState && stream._readableState.endEmitted; + + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + + var onerror = function onerror(err) { + callback.call(stream, err); + }; + + var onclose = function onclose() { + var err; + + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + 
stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} + +module.exports = eos; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/from-browser.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 0000000..a4ce56f --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/from.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 0000000..6c41284 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,64 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { 
Promise.resolve(value).then(_next, _throw); } } + +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); keys.push.apply(keys, symbols); } return keys; } + +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; } + +function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } + +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; + +function from(Readable, iterable, opts) { + var iterator; + + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new 
ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); // Reading boolean to protect against _read + // being called before last iteration completion. + + var reading = false; + + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + + function next() { + return _next2.apply(this, arguments); + } + + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _ref = yield iterator.next(), + value = _ref.value, + done = _ref.done; + + if (done) { + readable.push(null); + } else if (readable.push((yield value))) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + + return readable; +} + +module.exports = from; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/pipeline.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 0000000..6589909 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,97 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
+'use strict'; + +var eos; + +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} + +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; + +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} + +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} + +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; // request.destroy just do .end - .abort is what we want + + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} + +function call(fn) { + fn(); +} + +function pipe(from, to) { + return from.pipe(to); +} + +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} + +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + 
var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} + +module.exports = pipeline; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/state.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 0000000..19887eb --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,27 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; + +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} + +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + + return Math.floor(hwm); + } // Default value + + + return state.objectMode ? 
16 : 16 * 1024; +} + +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream.js b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/reverse_engineering/node_modules/readable-stream/package.json b/reverse_engineering/node_modules/readable-stream/package.json new file mode 100644 index 0000000..7878db8 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/package.json @@ -0,0 +1,97 @@ +{ + "_from": "readable-stream@^3.0.0", + "_id": "readable-stream@3.6.0", + "_inBundle": false, + "_integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "_location": "/readable-stream", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "readable-stream@^3.0.0", + "name": "readable-stream", + "escapedName": "readable-stream", + "rawSpec": "^3.0.0", + "saveSpec": null, + "fetchSpec": "^3.0.0" + }, + "_requiredBy": [ + "/split2" + ], + "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "_shasum": "337bbda3adc0706bd3e024426a286d4b4b2c9198", + "_spec": "readable-stream@^3.0.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/split2", + 
"browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "bugs": { + "url": "https://github.com/nodejs/readable-stream/issues" + }, + "bundleDependencies": false, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "deprecated": false, + "description": "Streams3, a user-land copy of the stream library from Node.js", + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "engines": { + "node": ">= 6" + }, + "homepage": "https://github.com/nodejs/readable-stream#readme", + "keywords": [ + "readable", + "stream", + "pipe" + ], + "license": "MIT", + "main": "readable.js", + "name": "readable-stream", + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream.git" + }, + "scripts": { + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "version": "3.6.0" +} diff 
--git a/reverse_engineering/node_modules/readable-stream/readable-browser.js b/reverse_engineering/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..adbf60d --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/reverse_engineering/node_modules/readable-stream/readable.js b/reverse_engineering/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..9e0ca12 --- /dev/null +++ b/reverse_engineering/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/reverse_engineering/node_modules/safe-buffer/LICENSE b/reverse_engineering/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ 
b/reverse_engineering/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/reverse_engineering/node_modules/safe-buffer/README.md b/reverse_engineering/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/reverse_engineering/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. 
You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. 
+ +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. 
+ +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). 
The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. 
+ +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. 
When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. 
It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. 
+ +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. 
+ +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. 
+Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/reverse_engineering/node_modules/safe-buffer/index.d.ts b/reverse_engineering/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/reverse_engineering/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): 
number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. 
+ * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. 
+ * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. 
+ * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/safe-buffer/index.js b/reverse_engineering/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..f8d3ec9 --- /dev/null +++ b/reverse_engineering/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License. 
Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/reverse_engineering/node_modules/safe-buffer/package.json b/reverse_engineering/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..098d7d6 --- /dev/null +++ b/reverse_engineering/node_modules/safe-buffer/package.json @@ -0,0 +1,76 @@ +{ + "_from": "safe-buffer@~5.2.0", + "_id": "safe-buffer@5.2.1", + "_inBundle": false, + "_integrity": 
"sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "_location": "/safe-buffer", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "safe-buffer@~5.2.0", + "name": "safe-buffer", + "escapedName": "safe-buffer", + "rawSpec": "~5.2.0", + "saveSpec": null, + "fetchSpec": "~5.2.0" + }, + "_requiredBy": [ + "/string_decoder" + ], + "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "_shasum": "1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6", + "_spec": "safe-buffer@~5.2.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/string_decoder", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Safer Node.js Buffer API", + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "name": "safe-buffer", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "types": "index.d.ts", + "version": "5.2.1" +} diff --git a/reverse_engineering/node_modules/safer-buffer/LICENSE b/reverse_engineering/node_modules/safer-buffer/LICENSE new file mode 100644 index 0000000..4fe9e6f --- /dev/null +++ b/reverse_engineering/node_modules/safer-buffer/LICENSE @@ -0,0 +1,21 @@ +MIT 
License + +Copyright (c) 2018 Nikita Skovoroda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/reverse_engineering/node_modules/safer-buffer/Porting-Buffer.md b/reverse_engineering/node_modules/safer-buffer/Porting-Buffer.md new file mode 100644 index 0000000..68d86ba --- /dev/null +++ b/reverse_engineering/node_modules/safer-buffer/Porting-Buffer.md @@ -0,0 +1,268 @@ +# Porting to the Buffer.from/Buffer.alloc API + + +## Overview + +- [Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x.](#variant-1) (*recommended*) +- [Variant 2: Use a polyfill](#variant-2) +- [Variant 3: manual detection, with safeguards](#variant-3) + +### Finding problematic bits of code using grep + +Just run `grep -nrE '[^a-zA-Z](Slow)?Buffer\s*\(' --exclude-dir node_modules`. + +It will find all the potentially unsafe places in your own code (with some considerably unlikely +exceptions). 
+ +### Finding problematic bits of code using Node.js 8 + +If you’re using Node.js ≥ 8.0.0 (which is recommended), Node.js exposes multiple options that help with finding the relevant pieces of code: + +- `--trace-warnings` will make Node.js show a stack trace for this warning and other warnings that are printed by Node.js. +- `--trace-deprecation` does the same thing, but only for deprecation warnings. +- `--pending-deprecation` will show more types of deprecation warnings. In particular, it will show the `Buffer()` deprecation warning, even on Node.js 8. + +You can set these flags using an environment variable: + +```console +$ export NODE_OPTIONS='--trace-warnings --pending-deprecation' +$ cat example.js +'use strict'; +const foo = new Buffer('foo'); +$ node example.js +(node:7147) [DEP0005] DeprecationWarning: The Buffer() and new Buffer() constructors are not recommended for use due to security and usability concerns. Please use the new Buffer.alloc(), Buffer.allocUnsafe(), or Buffer.from() construction methods instead. + at showFlaggedDeprecation (buffer.js:127:13) + at new Buffer (buffer.js:148:3) + at Object. (/path/to/example.js:2:13) + [... more stack trace lines ...] +``` + +### Finding problematic bits of code using linters + +Eslint rules [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +also find calls to deprecated `Buffer()` API. Those rules are included in some pre-sets. + +There is a drawback, though, that it doesn't always +[work correctly](https://github.com/chalker/safer-buffer#why-not-safe-buffer) when `Buffer` is +overriden e.g. with a polyfill, so recommended is a combination of this and some other method +described above. + + +## Variant 1: Drop support for Node.js ≤ 4.4.x and 5.0.0 — 5.9.x. + +This is the recommended solution nowadays that would imply only minimal overhead. 
+ +The Node.js 5.x release line has been unsupported since July 2016, and the Node.js 4.x release line reaches its End of Life in April 2018 (→ [Schedule](https://github.com/nodejs/Release#release-schedule)). This means that these versions of Node.js will *not* receive any updates, even in case of security issues, so using these release lines should be avoided, if at all possible. + +What you would do in this case is to convert all `new Buffer()` or `Buffer()` calls to use `Buffer.alloc()` or `Buffer.from()`, in the following way: + +- For `new Buffer(number)`, replace it with `Buffer.alloc(number)`. +- For `new Buffer(string)` (or `new Buffer(string, encoding)`), replace it with `Buffer.from(string)` (or `Buffer.from(string, encoding)`). +- For all other combinations of arguments (these are much rarer), also replace `new Buffer(...arguments)` with `Buffer.from(...arguments)`. + +Note that `Buffer.alloc()` is also _faster_ on the current Node.js versions than +`new Buffer(size).fill(0)`, which is what you would otherwise need to ensure zero-filling. + +Enabling eslint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +is recommended to avoid accidential unsafe Buffer API usage. + +There is also a [JSCodeshift codemod](https://github.com/joyeecheung/node-dep-codemod#dep005) +for automatically migrating Buffer constructors to `Buffer.alloc()` or `Buffer.from()`. +Note that it currently only works with cases where the arguments are literals or where the +constructor is invoked with two arguments. + +_If you currently support those older Node.js versions and dropping them would be a semver-major change +for you, or if you support older branches of your packages, consider using [Variant 2](#variant-2) +or [Variant 3](#variant-3) on older branches, so people using those older branches will also receive +the fix. 
That way, you will eradicate potential issues caused by unguarded Buffer API usage and +your users will not observe a runtime deprecation warning when running your code on Node.js 10._ + + +## Variant 2: Use a polyfill + +Utilize [safer-buffer](https://www.npmjs.com/package/safer-buffer) as a polyfill to support older +Node.js versions. + +You would take exacly the same steps as in [Variant 1](#variant-1), but with a polyfill +`const Buffer = require('safer-buffer').Buffer` in all files where you use the new `Buffer` api. + +Make sure that you do not use old `new Buffer` API — in any files where the line above is added, +using old `new Buffer()` API will _throw_. It will be easy to notice that in CI, though. + +Alternatively, you could use [buffer-from](https://www.npmjs.com/package/buffer-from) and/or +[buffer-alloc](https://www.npmjs.com/package/buffer-alloc) [ponyfills](https://ponyfill.com/) — +those are great, the only downsides being 4 deps in the tree and slightly more code changes to +migrate off them (as you would be using e.g. `Buffer.from` under a different name). If you need only +`Buffer.from` polyfilled — `buffer-from` alone which comes with no extra dependencies. + +_Alternatively, you could use [safe-buffer](https://www.npmjs.com/package/safe-buffer) — it also +provides a polyfill, but takes a different approach which has +[it's drawbacks](https://github.com/chalker/safer-buffer#why-not-safe-buffer). It will allow you +to also use the older `new Buffer()` API in your code, though — but that's arguably a benefit, as +it is problematic, can cause issues in your code, and will start emitting runtime deprecation +warnings starting with Node.js 10._ + +Note that in either case, it is important that you also remove all calls to the old Buffer +API manually — just throwing in `safe-buffer` doesn't fix the problem by itself, it just provides +a polyfill for the new API. I have seen people doing that mistake. 
+ +Enabling eslint rule [no-buffer-constructor](https://eslint.org/docs/rules/no-buffer-constructor) +or +[node/no-deprecated-api](https://github.com/mysticatea/eslint-plugin-node/blob/master/docs/rules/no-deprecated-api.md) +is recommended. + +_Don't forget to drop the polyfill usage once you drop support for Node.js < 4.5.0._ + + +## Variant 3 — manual detection, with safeguards + +This is useful if you create Buffer instances in only a few places (e.g. one), or you have your own +wrapper around them. + +### Buffer(0) + +This special case for creating empty buffers can be safely replaced with `Buffer.concat([])`, which +returns the same result all the way down to Node.js 0.8.x. + +### Buffer(notNumber) + +Before: + +```js +var buf = new Buffer(notNumber, encoding); +``` + +After: + +```js +var buf; +if (Buffer.from && Buffer.from !== Uint8Array.from) { + buf = Buffer.from(notNumber, encoding); +} else { + if (typeof notNumber === 'number') + throw new Error('The "size" argument must be of type number.'); + buf = new Buffer(notNumber, encoding); +} +``` + +`encoding` is optional. + +Note that the `typeof notNumber` before `new Buffer` is required (for cases when `notNumber` argument is not +hard-coded) and _is not caused by the deprecation of Buffer constructor_ — it's exactly _why_ the +Buffer constructor is deprecated. Ecosystem packages lacking this type-check caused numereous +security issues — situations when unsanitized user input could end up in the `Buffer(arg)` create +problems ranging from DoS to leaking sensitive information to the attacker from the process memory. + +When `notNumber` argument is hardcoded (e.g. literal `"abc"` or `[0,1,2]`), the `typeof` check can +be omitted. 
+ +Also note that using TypeScript does not fix this problem for you — when libs written in +`TypeScript` are used from JS, or when user input ends up there — it behaves exactly as pure JS, as +all type checks are translation-time only and are not present in the actual JS code which TS +compiles to. + +### Buffer(number) + +For Node.js 0.10.x (and below) support: + +```js +var buf; +if (Buffer.alloc) { + buf = Buffer.alloc(number); +} else { + buf = new Buffer(number); + buf.fill(0); +} +``` + +Otherwise (Node.js ≥ 0.12.x): + +```js +const buf = Buffer.alloc ? Buffer.alloc(number) : new Buffer(number).fill(0); +``` + +## Regarding Buffer.allocUnsafe + +Be extra cautious when using `Buffer.allocUnsafe`: + * Don't use it if you don't have a good reason to + * e.g. you probably won't ever see a performance difference for small buffers, in fact, those + might be even faster with `Buffer.alloc()`, + * if your code is not in the hot code path — you also probably won't notice a difference, + * keep in mind that zero-filling minimizes the potential risks. + * If you use it, make sure that you never return the buffer in a partially-filled state, + * if you are writing to it sequentially — always truncate it to the actuall written length + +Errors in handling buffers allocated with `Buffer.allocUnsafe` could result in various issues, +ranged from undefined behaviour of your code to sensitive data (user input, passwords, certs) +leaking to the remote attacker. + +_Note that the same applies to `new Buffer` usage without zero-filling, depending on the Node.js +version (and lacking type checks also adds DoS to the list of potential problems)._ + + +## FAQ + + +### What is wrong with the `Buffer` constructor? + +The `Buffer` constructor could be used to create a buffer in many different ways: + +- `new Buffer(42)` creates a `Buffer` of 42 bytes. 
Before Node.js 8, this buffer contained + *arbitrary memory* for performance reasons, which could include anything ranging from + program source code to passwords and encryption keys. +- `new Buffer('abc')` creates a `Buffer` that contains the UTF-8-encoded version of + the string `'abc'`. A second argument could specify another encoding: For example, + `new Buffer(string, 'base64')` could be used to convert a Base64 string into the original + sequence of bytes that it represents. +- There are several other combinations of arguments. + +This meant that, in code like `var buffer = new Buffer(foo);`, *it is not possible to tell +what exactly the contents of the generated buffer are* without knowing the type of `foo`. + +Sometimes, the value of `foo` comes from an external source. For example, this function +could be exposed as a service on a web server, converting a UTF-8 string into its Base64 form: + +``` +function stringToBase64(req, res) { + // The request body should have the format of `{ string: 'foobar' }` + const rawBytes = new Buffer(req.body.string) + const encoded = rawBytes.toString('base64') + res.end({ encoded: encoded }) +} +``` + +Note that this code does *not* validate the type of `req.body.string`: + +- `req.body.string` is expected to be a string. If this is the case, all goes well. +- `req.body.string` is controlled by the client that sends the request. +- If `req.body.string` is the *number* `50`, the `rawBytes` would be 50 bytes: + - Before Node.js 8, the content would be uninitialized + - After Node.js 8, the content would be `50` bytes with the value `0` + +Because of the missing type check, an attacker could intentionally send a number +as part of the request. Using this, they can either: + +- Read uninitialized memory. This **will** leak passwords, encryption keys and other + kinds of sensitive information. (Information leak) +- Force the program to allocate a large amount of memory. 
For example, when specifying + `500000000` as the input value, each request will allocate 500MB of memory. + This can be used to either exhaust the memory available of a program completely + and make it crash, or slow it down significantly. (Denial of Service) + +Both of these scenarios are considered serious security issues in a real-world +web server context. + +when using `Buffer.from(req.body.string)` instead, passing a number will always +throw an exception instead, giving a controlled behaviour that can always be +handled by the program. + + +### The `Buffer()` constructor has been deprecated for a while. Is this really an issue? + +Surveys of code in the `npm` ecosystem have shown that the `Buffer()` constructor is still +widely used. This includes new code, and overall usage of such code has actually been +*increasing*. diff --git a/reverse_engineering/node_modules/safer-buffer/Readme.md b/reverse_engineering/node_modules/safer-buffer/Readme.md new file mode 100644 index 0000000..14b0822 --- /dev/null +++ b/reverse_engineering/node_modules/safer-buffer/Readme.md @@ -0,0 +1,156 @@ +# safer-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![javascript style guide][standard-image]][standard-url] [![Security Responsible Disclosure][secuirty-image]][secuirty-url] + +[travis-image]: https://travis-ci.org/ChALkeR/safer-buffer.svg?branch=master +[travis-url]: https://travis-ci.org/ChALkeR/safer-buffer +[npm-image]: https://img.shields.io/npm/v/safer-buffer.svg +[npm-url]: https://npmjs.org/package/safer-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com +[secuirty-image]: https://img.shields.io/badge/Security-Responsible%20Disclosure-green.svg +[secuirty-url]: https://github.com/nodejs/security-wg/blob/master/processes/responsible_disclosure_template.md + +Modern Buffer API polyfill without footguns, working on Node.js from 0.8 to current. + +## How to use? 
+ +First, port all `Buffer()` and `new Buffer()` calls to `Buffer.alloc()` and `Buffer.from()` API. + +Then, to achieve compatibility with outdated Node.js versions (`<4.5.0` and 5.x `<5.9.0`), use +`const Buffer = require('safer-buffer').Buffer` in all files where you make calls to the new +Buffer API. _Use `var` instead of `const` if you need that for your Node.js version range support._ + +Also, see the +[porting Buffer](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md) guide. + +## Do I need it? + +Hopefully, not — dropping support for outdated Node.js versions should be fine nowdays, and that +is the recommended path forward. You _do_ need to port to the `Buffer.alloc()` and `Buffer.from()` +though. + +See the [porting guide](https://github.com/ChALkeR/safer-buffer/blob/master/Porting-Buffer.md) +for a better description. + +## Why not [safe-buffer](https://npmjs.com/safe-buffer)? + +_In short: while `safe-buffer` serves as a polyfill for the new API, it allows old API usage and +itself contains footguns._ + +`safe-buffer` could be used safely to get the new API while still keeping support for older +Node.js versions (like this module), but while analyzing ecosystem usage of the old Buffer API +I found out that `safe-buffer` is itself causing problems in some cases. + +For example, consider the following snippet: + +```console +$ cat example.unsafe.js +console.log(Buffer(20)) +$ ./node-v6.13.0-linux-x64/bin/node example.unsafe.js + +$ standard example.unsafe.js +standard: Use JavaScript Standard Style (https://standardjs.com) + /home/chalker/repo/safer-buffer/example.unsafe.js:2:13: 'Buffer()' was deprecated since v6. Use 'Buffer.alloc()' or 'Buffer.from()' (use 'https://www.npmjs.com/package/safe-buffer' for '<4.5.0') instead. +``` + +This is allocates and writes to console an uninitialized chunk of memory. +[standard](https://www.npmjs.com/package/standard) linter (among others) catch that and warn people +to avoid using unsafe API. 
+ +Let's now throw in `safe-buffer`! + +```console +$ cat example.safe-buffer.js +const Buffer = require('safe-buffer').Buffer +console.log(Buffer(20)) +$ standard example.safe-buffer.js +$ ./node-v6.13.0-linux-x64/bin/node example.safe-buffer.js + +``` + +See the problem? Adding in `safe-buffer` _magically removes the lint warning_, but the behavior +remains identiсal to what we had before, and when launched on Node.js 6.x LTS — this dumps out +chunks of uninitialized memory. +_And this code will still emit runtime warnings on Node.js 10.x and above._ + +That was done by design. I first considered changing `safe-buffer`, prohibiting old API usage or +emitting warnings on it, but that significantly diverges from `safe-buffer` design. After some +discussion, it was decided to move my approach into a separate package, and _this is that separate +package_. + +This footgun is not imaginary — I observed top-downloaded packages doing that kind of thing, +«fixing» the lint warning by blindly including `safe-buffer` without any actual changes. + +Also in some cases, even if the API _was_ migrated to use of safe Buffer API — a random pull request +can bring unsafe Buffer API usage back to the codebase by adding new calls — and that could go +unnoticed even if you have a linter prohibiting that (becase of the reason stated above), and even +pass CI. 
_I also observed that being done in popular packages._ + +Some examples: + * [webdriverio](https://github.com/webdriverio/webdriverio/commit/05cbd3167c12e4930f09ef7cf93b127ba4effae4#diff-124380949022817b90b622871837d56cR31) + (a module with 548 759 downloads/month), + * [websocket-stream](https://github.com/maxogden/websocket-stream/commit/c9312bd24d08271687d76da0fe3c83493871cf61) + (218 288 d/m, fix in [maxogden/websocket-stream#142](https://github.com/maxogden/websocket-stream/pull/142)), + * [node-serialport](https://github.com/node-serialport/node-serialport/commit/e8d9d2b16c664224920ce1c895199b1ce2def48c) + (113 138 d/m, fix in [node-serialport/node-serialport#1510](https://github.com/node-serialport/node-serialport/pull/1510)), + * [karma](https://github.com/karma-runner/karma/commit/3d94b8cf18c695104ca195334dc75ff054c74eec) + (3 973 193 d/m, fix in [karma-runner/karma#2947](https://github.com/karma-runner/karma/pull/2947)), + * [spdy-transport](https://github.com/spdy-http2/spdy-transport/commit/5375ac33f4a62a4f65bcfc2827447d42a5dbe8b1) + (5 970 727 d/m, fix in [spdy-http2/spdy-transport#53](https://github.com/spdy-http2/spdy-transport/pull/53)). + * And there are a lot more over the ecosystem. + +I filed a PR at +[mysticatea/eslint-plugin-node#110](https://github.com/mysticatea/eslint-plugin-node/pull/110) to +partially fix that (for cases when that lint rule is used), but it is a semver-major change for +linter rules and presets, so it would take significant time for that to reach actual setups. +_It also hasn't been released yet (2018-03-20)._ + +Also, `safer-buffer` discourages the usage of `.allocUnsafe()`, which is often done by a mistake. +It still supports it with an explicit concern barier, by placing it under +`require('safer-buffer/dangereous')`. + +## But isn't throwing bad? + +Not really. 
It's an error that could be noticed and fixed early, instead of causing havoc later like +unguarded `new Buffer()` calls that end up receiving user input can do. + +This package affects only the files where `var Buffer = require('safer-buffer').Buffer` was done, so +it is really simple to keep track of things and make sure that you don't mix old API usage with that. +Also, CI should hint anything that you might have missed. + +New commits, if tested, won't land new usage of unsafe Buffer API this way. +_Node.js 10.x also deals with that by printing a runtime depecation warning._ + +### Would it affect third-party modules? + +No, unless you explicitly do an awful thing like monkey-patching or overriding the built-in `Buffer`. +Don't do that. + +### But I don't want throwing… + +That is also fine! + +Also, it could be better in some cases when you don't comprehensive enough test coverage. + +In that case — just don't override `Buffer` and use +`var SaferBuffer = require('safer-buffer').Buffer` instead. + +That way, everything using `Buffer` natively would still work, but there would be two drawbacks: + +* `Buffer.from`/`Buffer.alloc` won't be polyfilled — use `SaferBuffer.from` and + `SaferBuffer.alloc` instead. +* You are still open to accidentally using the insecure deprecated API — use a linter to catch that. + +Note that using a linter to catch accidential `Buffer` constructor usage in this case is strongly +recommended. `Buffer` is not overriden in this usecase, so linters won't get confused. + +## «Without footguns»? + +Well, it is still possible to do _some_ things with `Buffer` API, e.g. accessing `.buffer` property +on older versions and duping things from there. You shouldn't do that in your code, probabably. + +The intention is to remove the most significant footguns that affect lots of packages in the +ecosystem, and to do it in the proper way. 
+ +Also, this package doesn't protect against security issues affecting some Node.js versions, so for +usage in your own production code, it is still recommended to update to a Node.js version +[supported by upstream](https://github.com/nodejs/release#release-schedule). diff --git a/reverse_engineering/node_modules/safer-buffer/dangerous.js b/reverse_engineering/node_modules/safer-buffer/dangerous.js new file mode 100644 index 0000000..ca41fdc --- /dev/null +++ b/reverse_engineering/node_modules/safer-buffer/dangerous.js @@ -0,0 +1,58 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var buffer = require('buffer') +var Buffer = buffer.Buffer +var safer = require('./safer.js') +var Safer = safer.Buffer + +var dangerous = {} + +var key + +for (key in safer) { + if (!safer.hasOwnProperty(key)) continue + dangerous[key] = safer[key] +} + +var Dangereous = dangerous.Buffer = {} + +// Copy Safer API +for (key in Safer) { + if (!Safer.hasOwnProperty(key)) continue + Dangereous[key] = Safer[key] +} + +// Copy those missing unsafe methods, if they are present +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (Dangereous.hasOwnProperty(key)) continue + Dangereous[key] = Buffer[key] +} + +if (!Dangereous.allocUnsafe) { + Dangereous.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + return Buffer(size) + } +} + +if (!Dangereous.allocUnsafeSlow) { + Dangereous.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + return buffer.SlowBuffer(size) + } +} + +module.exports = dangerous diff --git a/reverse_engineering/node_modules/safer-buffer/package.json b/reverse_engineering/node_modules/safer-buffer/package.json new file mode 100644 index 0000000..895825b --- /dev/null +++ b/reverse_engineering/node_modules/safer-buffer/package.json @@ -0,0 +1,63 @@ +{ + "_args": [ + [ + "safer-buffer@2.1.2", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "safer-buffer@2.1.2", + "_id": "safer-buffer@2.1.2", + "_inBundle": false, + "_integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "_location": "/safer-buffer", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "safer-buffer@2.1.2", + "name": "safer-buffer", + "escapedName": "safer-buffer", + "rawSpec": "2.1.2", + "saveSpec": null, + "fetchSpec": "2.1.2" + }, + "_requiredBy": [ + "/asn1" + ], + "_resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "_spec": "2.1.2", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "Nikita Skovoroda", + "email": "chalkerx@gmail.com", + "url": "https://github.com/ChALkeR" + }, + "bugs": { + "url": "https://github.com/ChALkeR/safer-buffer/issues" + }, + "description": "Modern Buffer API polyfill without footguns", + "devDependencies": { + "standard": "^11.0.1", + "tape": "^4.9.0" + }, + "files": [ + "Porting-Buffer.md", + "Readme.md", + "tests.js", + "dangerous.js", + "safer.js" + ], + "homepage": "https://github.com/ChALkeR/safer-buffer#readme", + "license": "MIT", + "main": "safer.js", + "name": "safer-buffer", + "repository": { + "type": "git", + "url": "git+https://github.com/ChALkeR/safer-buffer.git" + }, + "scripts": { + 
"browserify-test": "browserify --external tape tests.js > browserify-tests.js && tape browserify-tests.js", + "test": "standard && tape tests.js" + }, + "version": "2.1.2" +} diff --git a/reverse_engineering/node_modules/safer-buffer/safer.js b/reverse_engineering/node_modules/safer-buffer/safer.js new file mode 100644 index 0000000..37c7e1a --- /dev/null +++ b/reverse_engineering/node_modules/safer-buffer/safer.js @@ -0,0 +1,77 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var buffer = require('buffer') +var Buffer = buffer.Buffer + +var safer = {} + +var key + +for (key in buffer) { + if (!buffer.hasOwnProperty(key)) continue + if (key === 'SlowBuffer' || key === 'Buffer') continue + safer[key] = buffer[key] +} + +var Safer = safer.Buffer = {} +for (key in Buffer) { + if (!Buffer.hasOwnProperty(key)) continue + if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue + Safer[key] = Buffer[key] +} + +safer.Buffer.prototype = Buffer.prototype + +if (!Safer.from || Safer.from === Uint8Array.from) { + Safer.from = function (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value) + } + if (value && typeof value.length === 'undefined') { + throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value) + } + return Buffer(value, encodingOrOffset, length) + } +} + +if (!Safer.alloc) { + Safer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('The "size" argument must be of type number. 
Received type ' + typeof size) + } + if (size < 0 || size >= 2 * (1 << 30)) { + throw new RangeError('The value "' + size + '" is invalid for option "size"') + } + var buf = Buffer(size) + if (!fill || fill.length === 0) { + buf.fill(0) + } else if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + return buf + } +} + +if (!safer.kStringMaxLength) { + try { + safer.kStringMaxLength = process.binding('buffer').kStringMaxLength + } catch (e) { + // we can't determine kStringMaxLength in environments where process.binding + // is unsupported, so let's not set it + } +} + +if (!safer.constants) { + safer.constants = { + MAX_LENGTH: safer.kMaxLength + } + if (safer.kStringMaxLength) { + safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength + } +} + +module.exports = safer diff --git a/reverse_engineering/node_modules/safer-buffer/tests.js b/reverse_engineering/node_modules/safer-buffer/tests.js new file mode 100644 index 0000000..7ed2777 --- /dev/null +++ b/reverse_engineering/node_modules/safer-buffer/tests.js @@ -0,0 +1,406 @@ +/* eslint-disable node/no-deprecated-api */ + +'use strict' + +var test = require('tape') + +var buffer = require('buffer') + +var index = require('./') +var safer = require('./safer') +var dangerous = require('./dangerous') + +/* Inheritance tests */ + +test('Default is Safer', function (t) { + t.equal(index, safer) + t.notEqual(safer, dangerous) + t.notEqual(index, dangerous) + t.end() +}) + +test('Is not a function', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'object') + }); + [buffer].forEach(function (impl) { + t.equal(typeof impl, 'object') + t.equal(typeof impl.Buffer, 'function') + }) + t.end() +}) + +test('Constructor throws', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer() }) + t.throws(function () { impl.Buffer(0) }) + t.throws(function () 
{ impl.Buffer('a') }) + t.throws(function () { impl.Buffer('a', 'utf-8') }) + t.throws(function () { return new impl.Buffer() }) + t.throws(function () { return new impl.Buffer(0) }) + t.throws(function () { return new impl.Buffer('a') }) + t.throws(function () { return new impl.Buffer('a', 'utf-8') }) + }) + t.end() +}) + +test('Safe methods exist', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.alloc, 'function', 'alloc') + t.equal(typeof impl.Buffer.from, 'function', 'from') + }) + t.end() +}) + +test('Unsafe methods exist only in Dangerous', function (t) { + [index, safer].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'undefined') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'undefined') + }); + [dangerous].forEach(function (impl) { + t.equal(typeof impl.Buffer.allocUnsafe, 'function') + t.equal(typeof impl.Buffer.allocUnsafeSlow, 'function') + }) + t.end() +}) + +test('Generic methods/properties are defined and equal', function (t) { + ['poolSize', 'isBuffer', 'concat', 'byteLength'].forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in buffer static methods/properties are inherited', function (t) { + Object.keys(buffer).forEach(function (method) { + if (method === 'SlowBuffer' || method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], buffer[method], method) + t.notEqual(typeof impl[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Built-in Buffer static methods/properties are inherited', function (t) { + Object.keys(buffer.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], 
buffer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('.prototype property of Buffer is inherited', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.prototype, buffer.Buffer.prototype, 'prototype') + t.notEqual(typeof impl.Buffer.prototype, 'undefined', 'prototype') + }) + t.end() +}) + +test('All Safer methods are present in Dangerous', function (t) { + Object.keys(safer).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], safer[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(safer.Buffer).forEach(function (method) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], safer.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +test('Safe methods from Dangerous methods are present in Safer', function (t) { + Object.keys(dangerous).forEach(function (method) { + if (method === 'Buffer') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl[method], dangerous[method], method) + if (method !== 'kStringMaxLength') { + t.notEqual(typeof impl[method], 'undefined', method) + } + }) + }) + Object.keys(dangerous.Buffer).forEach(function (method) { + if (method === 'allocUnsafe' || method === 'allocUnsafeSlow') return; + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer[method], dangerous.Buffer[method], method) + t.notEqual(typeof impl.Buffer[method], 'undefined', method) + }) + }) + t.end() +}) + +/* Behaviour tests */ + +test('Methods return Buffers', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 10))) + 
t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(0, 'a'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(10, 'x'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.alloc(9, 'ab'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(''))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('string', 'utf-8'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([0, 42, 3]))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from(new Uint8Array([0, 42, 3])))) + t.ok(buffer.Buffer.isBuffer(impl.Buffer.from([]))) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](0))) + t.ok(buffer.Buffer.isBuffer(dangerous.Buffer[method](10))) + }) + t.end() +}) + +test('Constructor is buffer.Buffer', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(0, 'a').constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(10).constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(10, 'x').constructor, buffer.Buffer) + t.equal(impl.Buffer.alloc(9, 'ab').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('string', 'utf-8').constructor, buffer.Buffer) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').constructor, buffer.Buffer) + t.equal(impl.Buffer.from([0, 42, 3]).constructor, buffer.Buffer) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).constructor, buffer.Buffer) + t.equal(impl.Buffer.from([]).constructor, buffer.Buffer) + }); + [0, 10, 100].forEach(function (arg) { + t.equal(dangerous.Buffer.allocUnsafe(arg).constructor, buffer.Buffer) + 
t.equal(dangerous.Buffer.allocUnsafeSlow(arg).constructor, buffer.SlowBuffer(0).constructor) + }) + t.end() +}) + +test('Invalid calls throw', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.throws(function () { impl.Buffer.from(0) }) + t.throws(function () { impl.Buffer.from(10) }) + t.throws(function () { impl.Buffer.from(10, 'utf-8') }) + t.throws(function () { impl.Buffer.from('string', 'invalid encoding') }) + t.throws(function () { impl.Buffer.from(-10) }) + t.throws(function () { impl.Buffer.from(1e90) }) + t.throws(function () { impl.Buffer.from(Infinity) }) + t.throws(function () { impl.Buffer.from(-Infinity) }) + t.throws(function () { impl.Buffer.from(NaN) }) + t.throws(function () { impl.Buffer.from(null) }) + t.throws(function () { impl.Buffer.from(undefined) }) + t.throws(function () { impl.Buffer.from() }) + t.throws(function () { impl.Buffer.from({}) }) + t.throws(function () { impl.Buffer.alloc('') }) + t.throws(function () { impl.Buffer.alloc('string') }) + t.throws(function () { impl.Buffer.alloc('string', 'utf-8') }) + t.throws(function () { impl.Buffer.alloc('b25ldHdvdGhyZWU=', 'base64') }) + t.throws(function () { impl.Buffer.alloc(-10) }) + t.throws(function () { impl.Buffer.alloc(1e90) }) + t.throws(function () { impl.Buffer.alloc(2 * (1 << 30)) }) + t.throws(function () { impl.Buffer.alloc(Infinity) }) + t.throws(function () { impl.Buffer.alloc(-Infinity) }) + t.throws(function () { impl.Buffer.alloc(null) }) + t.throws(function () { impl.Buffer.alloc(undefined) }) + t.throws(function () { impl.Buffer.alloc() }) + t.throws(function () { impl.Buffer.alloc([]) }) + t.throws(function () { impl.Buffer.alloc([0, 42, 3]) }) + t.throws(function () { impl.Buffer.alloc({}) }) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.throws(function () { dangerous.Buffer[method]('') }) + t.throws(function () { dangerous.Buffer[method]('string') }) + t.throws(function () { dangerous.Buffer[method]('string', 
'utf-8') }) + t.throws(function () { dangerous.Buffer[method](2 * (1 << 30)) }) + t.throws(function () { dangerous.Buffer[method](Infinity) }) + if (dangerous.Buffer[method] === buffer.Buffer.allocUnsafe) { + t.skip('Skipping, older impl of allocUnsafe coerced negative sizes to 0') + } else { + t.throws(function () { dangerous.Buffer[method](-10) }) + t.throws(function () { dangerous.Buffer[method](-1e90) }) + t.throws(function () { dangerous.Buffer[method](-Infinity) }) + } + t.throws(function () { dangerous.Buffer[method](null) }) + t.throws(function () { dangerous.Buffer[method](undefined) }) + t.throws(function () { dangerous.Buffer[method]() }) + t.throws(function () { dangerous.Buffer[method]([]) }) + t.throws(function () { dangerous.Buffer[method]([0, 42, 3]) }) + t.throws(function () { dangerous.Buffer[method]({}) }) + }) + t.end() +}) + +test('Buffers have appropriate lengths', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.equal(impl.Buffer.alloc(0).length, 0) + t.equal(impl.Buffer.alloc(10).length, 10) + t.equal(impl.Buffer.from('').length, 0) + t.equal(impl.Buffer.from('string').length, 6) + t.equal(impl.Buffer.from('string', 'utf-8').length, 6) + t.equal(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64').length, 11) + t.equal(impl.Buffer.from([0, 42, 3]).length, 3) + t.equal(impl.Buffer.from(new Uint8Array([0, 42, 3])).length, 3) + t.equal(impl.Buffer.from([]).length, 0) + }); + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + t.equal(dangerous.Buffer[method](0).length, 0) + t.equal(dangerous.Buffer[method](10).length, 10) + }) + t.end() +}) + +test('Buffers have appropriate lengths (2)', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true; + [ safer.Buffer.alloc, + dangerous.Buffer.allocUnsafe, + dangerous.Buffer.allocUnsafeSlow + ].forEach(function (method) { + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() 
* 1e5) + var buf = method(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + } + }) + t.ok(ok) + t.end() +}) + +test('.alloc(size) is zero-filled and has correct length', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = index.Buffer.alloc(length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.allocUnsafe / .allocUnsafeSlow are fillable and have correct lengths', function (t) { + ['allocUnsafe', 'allocUnsafeSlow'].forEach(function (method) { + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var buf = dangerous.Buffer[method](length) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + buf.fill(0, 0, length) + var j + for (j = 0; j < length; j++) { + if (buf[j] !== 0) ok = false + } + buf.fill(1, 0, length) + for (j = 0; j < length; j++) { + if (buf[j] !== 1) ok = false + } + } + t.ok(ok, method) + }) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.end() +}) + +test('.alloc(size, fill) is `fill`-filled', function (t) { + 
t.equal(index.Buffer.alloc, safer.Buffer.alloc) + t.equal(index.Buffer.alloc, dangerous.Buffer.alloc) + var ok = true + for (var i = 0; i < 1e2; i++) { + var length = Math.round(Math.random() * 2e6) + var fill = Math.round(Math.random() * 255) + var buf = index.Buffer.alloc(length, fill) + if (!buffer.Buffer.isBuffer(buf)) ok = false + if (buf.length !== length) ok = false + for (var j = 0; j < length; j++) { + if (buf[j] !== fill) ok = false + } + } + t.ok(ok) + t.deepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 97)) + t.notDeepEqual(index.Buffer.alloc(9, 'a'), index.Buffer.alloc(9, 98)) + + var tmp = new buffer.Buffer(2) + tmp.fill('ok') + if (tmp[1] === tmp[0]) { + // Outdated Node.js + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('ooooo')) + } else { + t.deepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('okoko')) + } + t.notDeepEqual(index.Buffer.alloc(5, 'ok'), index.Buffer.from('kokok')) + + t.end() +}) + +test('safer.Buffer.from returns results same as Buffer constructor', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), new buffer.Buffer('')) + t.deepEqual(impl.Buffer.from('string'), new buffer.Buffer('string')) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), new buffer.Buffer('string', 'utf-8')) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), new buffer.Buffer('b25ldHdvdGhyZWU=', 'base64')) + t.deepEqual(impl.Buffer.from([0, 42, 3]), new buffer.Buffer([0, 42, 3])) + t.deepEqual(impl.Buffer.from(new Uint8Array([0, 42, 3])), new buffer.Buffer(new Uint8Array([0, 42, 3]))) + t.deepEqual(impl.Buffer.from([]), new buffer.Buffer([])) + }) + t.end() +}) + +test('safer.Buffer.from returns consistent results', function (t) { + [index, safer, dangerous].forEach(function (impl) { + t.deepEqual(impl.Buffer.from(''), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from([]), impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from(new Uint8Array([])), 
impl.Buffer.alloc(0)) + t.deepEqual(impl.Buffer.from('string', 'utf-8'), impl.Buffer.from('string')) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from([115, 116, 114, 105, 110, 103])) + t.deepEqual(impl.Buffer.from('string'), impl.Buffer.from(impl.Buffer.from('string'))) + t.deepEqual(impl.Buffer.from('b25ldHdvdGhyZWU=', 'base64'), impl.Buffer.from('onetwothree')) + t.notDeepEqual(impl.Buffer.from('b25ldHdvdGhyZWU='), impl.Buffer.from('onetwothree')) + }) + t.end() +}) diff --git a/reverse_engineering/node_modules/semver/CHANGELOG.md b/reverse_engineering/node_modules/semver/CHANGELOG.md new file mode 100644 index 0000000..66304fd --- /dev/null +++ b/reverse_engineering/node_modules/semver/CHANGELOG.md @@ -0,0 +1,39 @@ +# changes log + +## 5.7 + +* Add `minVersion` method + +## 5.6 + +* Move boolean `loose` param to an options object, with + backwards-compatibility protection. +* Add ability to opt out of special prerelease version handling with + the `includePrerelease` option flag. + +## 5.5 + +* Add version coercion capabilities + +## 5.4 + +* Add intersection checking + +## 5.3 + +* Add `minSatisfying` method + +## 5.2 + +* Add `prerelease(v)` that returns prerelease components + +## 5.1 + +* Add Backus-Naur for ranges +* Remove excessively cute inspection methods + +## 5.0 + +* Remove AMD/Browserified build artifacts +* Fix ltr and gtr when using the `*` range +* Fix for range `*` with a prerelease identifier diff --git a/reverse_engineering/node_modules/semver/LICENSE b/reverse_engineering/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/reverse_engineering/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/reverse_engineering/node_modules/semver/README.md b/reverse_engineering/node_modules/semver/README.md new file mode 100644 index 0000000..f8dfa5a --- /dev/null +++ b/reverse_engineering/node_modules/semver/README.md @@ -0,0 +1,412 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install --save semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. 
+ +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. 
+ +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for the purpose of range +matching) by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. 
+ +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any version satisfies) +* `1.x` := `>=1.0.0 <2.0.0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. 
+ +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero digit in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0` +* `^0.2.3` := `>=0.2.3 <0.3.0` +* `^0.0.3` := `>=0.0.3 <0.0.4` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. 
+* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0` +* `^0.0.x` := `>=0.0.0 <0.1.0` +* `^0.0` := `>=0.0.0 <0.1.0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0` +* `^0.x` := `>=0.0.0 <1.0.0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose` Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. 
+- `includePrerelease` Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. 
`"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can possibly match + the given range. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the ranges comparators intersect + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! 
For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string, and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). diff --git a/reverse_engineering/node_modules/semver/bin/semver b/reverse_engineering/node_modules/semver/bin/semver new file mode 100755 index 0000000..801e77f --- /dev/null +++ b/reverse_engineering/node_modules/semver/bin/semver @@ -0,0 +1,160 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +var argv = process.argv.slice(2) + +var versions = [] + +var range = [] + +var inc = null + +var version = require('../package.json').version + +var loose = false + +var includePrerelease = false + +var coerce = false + +var identifier + +var semver = require('../semver') + +var reverse = false + +var options = {} + +main() + +function main () { + if (!argv.length) return help() + while (argv.length) { + var a = argv.shift() + var indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + a = a.slice(0, indexOfEqualSign) + argv.unshift(a.slice(indexOfEqualSign + 1)) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + var options = { loose: loose, includePrerelease: includePrerelease } + + versions = versions.map(function (v) { + return coerce ? 
(semver.coerce(v) || { version: v }).version : v + }).filter(function (v) { + return semver.valid(v) + }) + if (!versions.length) return fail() + if (inc && (versions.length !== 1 || range.length)) { return failInc() } + + for (var i = 0, l = range.length; i < l; i++) { + versions = versions.filter(function (v) { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) return fail() + } + return success(versions) +} + +function failInc () { + console.error('--inc can only be used on a single version with no range') + fail() +} + +function fail () { process.exit(1) } + +function success () { + var compare = reverse ? 'rcompare' : 'compare' + versions.sort(function (a, b) { + return semver[compare](a, b, options) + }).map(function (v) { + return semver.clean(v, options) + }).map(function (v) { + return inc ? semver.inc(v, inc, options, identifier) : v + }).forEach(function (v, i, _) { console.log(v) }) +} + +function help () { + console.log(['SemVer ' + version, + '', + 'A JavaScript implementation of the https://semver.org/ specification', + 'Copyright Isaac Z. Schlueter', + '', + 'Usage: semver [options] [ [...]]', + 'Prints valid versions sorted by SemVer precedence', + '', + 'Options:', + '-r --range ', + ' Print versions that match the specified range.', + '', + '-i --increment []', + ' Increment a version by the specified level. Level can', + ' be one of: major, minor, patch, premajor, preminor,', + " prepatch, or prerelease. 
Default level is 'patch'.", + ' Only one version may be specified.', + '', + '--preid ', + ' Identifier to be used to prefix premajor, preminor,', + ' prepatch or prerelease version increments.', + '', + '-l --loose', + ' Interpret versions and ranges loosely', + '', + '-p --include-prerelease', + ' Always include prerelease versions in range matching', + '', + '-c --coerce', + ' Coerce a string into SemVer if possible', + ' (does not imply --loose)', + '', + 'Program exits successfully if any valid version satisfies', + 'all supplied ranges, and prints all satisfying versions.', + '', + 'If no satisfying versions are found, then exits failure.', + '', + 'Versions are printed in ascending order, so supplying', + 'multiple versions to the utility will just sort them.' + ].join('\n')) +} diff --git a/reverse_engineering/node_modules/semver/package.json b/reverse_engineering/node_modules/semver/package.json new file mode 100644 index 0000000..3d1a79f --- /dev/null +++ b/reverse_engineering/node_modules/semver/package.json @@ -0,0 +1,63 @@ +{ + "_args": [ + [ + "semver@5.7.1", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "semver@5.7.1", + "_id": "semver@5.7.1", + "_inBundle": false, + "_integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "_location": "/semver", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "semver@5.7.1", + "name": "semver", + "escapedName": "semver", + "rawSpec": "5.7.1", + "saveSpec": null, + "fetchSpec": "5.7.1" + }, + "_requiredBy": [ + "/ssh2-streams" + ], + "_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "_spec": "5.7.1", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "bin": { + "semver": "bin/semver" + }, + "bugs": { + "url": "https://github.com/npm/node-semver/issues" + }, + "description": "The semantic version parser used by npm.", + 
"devDependencies": { + "tap": "^13.0.0-rc.18" + }, + "files": [ + "bin", + "range.bnf", + "semver.js" + ], + "homepage": "https://github.com/npm/node-semver#readme", + "license": "ISC", + "main": "semver.js", + "name": "semver", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/node-semver.git" + }, + "scripts": { + "postpublish": "git push origin --all; git push origin --tags", + "postversion": "npm publish", + "preversion": "npm test", + "test": "tap" + }, + "tap": { + "check-coverage": true + }, + "version": "5.7.1" +} diff --git a/reverse_engineering/node_modules/semver/range.bnf b/reverse_engineering/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/reverse_engineering/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' 
part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/reverse_engineering/node_modules/semver/semver.js b/reverse_engineering/node_modules/semver/semver.js new file mode 100644 index 0000000..d315d5d --- /dev/null +++ b/reverse_engineering/node_modules/semver/semver.js @@ -0,0 +1,1483 @@ +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var R = 0 + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' 
+ + '(' + src[NUMERICIDENTIFIER] + ')' + +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' + +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' + +src[FULL] = '^' + FULLPLAIN + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. 
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' + +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' + +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' + +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' + +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?' + +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' + +// Tilde ranges. 
+// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' + +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +var tildeTrimReplace = '$1~' + +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' + +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +var caretTrimReplace = '$1^' + +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. 
+var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' + +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? re[LOOSE] : re[FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. 
+SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, 
b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY) { + return true + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + 
(sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? 
re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + }) + }) +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[TILDELOOSE] : re[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' 
+ (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[CARETLOOSE] : re[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
re[XRANGELOOSE] : re[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], '') +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' 
+ fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. 
+ var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
+ + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version) { + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + var match = version.match(re[COERCE]) + + if (match == null) { + return null + } + + return parse(match[1] + + '.' + (match[2] || '0') + + '.' 
+ (match[3] || '0')) +} diff --git a/reverse_engineering/node_modules/split2/LICENSE b/reverse_engineering/node_modules/split2/LICENSE new file mode 100644 index 0000000..a91afe5 --- /dev/null +++ b/reverse_engineering/node_modules/split2/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2014-2018, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/reverse_engineering/node_modules/split2/README.md b/reverse_engineering/node_modules/split2/README.md new file mode 100644 index 0000000..00db262 --- /dev/null +++ b/reverse_engineering/node_modules/split2/README.md @@ -0,0 +1,97 @@ +# Split2(matcher, mapper, options) + +![ci](https://github.com/mcollina/split2/workflows/ci/badge.svg) + +Break up a stream and reassemble it so that each line is a chunk. +`split2` is inspired by [@dominictarr](https://github.com/dominictarr) [`split`](https://github.com/dominictarr/split) module, +and it is totally API compatible with it. +However, it is based on Node.js core [`Transform`](https://nodejs.org/api/stream.html#stream_new_stream_transform_options) via [`readable-stream`](https://github.com/nodejs/readable-stream) + +`matcher` may be a `String`, or a `RegExp`. Example, read every line in a file ... 
+ +``` js + fs.createReadStream(file) + .pipe(split2()) + .on('data', function (line) { + //each chunk now is a separate line! + }) + +``` + +`split` takes the same arguments as `string.split` except it defaults to '/\r?\n/', and the optional `limit` paremeter is ignored. +[String#split](https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/String/split) + +`split` takes an optional options object on it's third argument, which +is directly passed as a +[Transform](https://nodejs.org/api/stream.html#stream_new_stream_transform_options) +option. + +Additionally, the `.maxLength` and `.skipOverflow` options are implemented, which set limits on the internal +buffer size and the stream's behavior when the limit is exceeded. There is no limit unless `maxLength` is set. When +the internal buffer size exceeds `maxLength`, the stream emits an error by default. You may also set `skipOverflow` to +true to suppress the error and instead skip past any lines that cause the internal buffer to exceed `maxLength`. + +Calling `.destroy` will make the stream emit `close`. Use this to perform cleanup logic + +``` js +var splitFile = function(filename) { + var file = fs.createReadStream(filename) + + return file + .pipe(split2()) + .on('close', function() { + // destroy the file stream in case the split stream was destroyed + file.destroy() + }) +} + +var stream = splitFile('my-file.txt') + +stream.destroy() // will destroy the input file stream +``` + +# NDJ - Newline Delimited Json + +`split2` accepts a function which transforms each line. + +``` js +fs.createReadStream(file) + .pipe(split2(JSON.parse)) + .on('data', function (obj) { + //each chunk now is a js object + }) + .on("error", function(error) => { + //handling parsing errors + }) +``` + +However, in [@dominictarr](https://github.com/dominictarr) [`split`](https://github.com/dominictarr/split) the mapper +is wrapped in a try-catch, while here it is not: if your parsing logic can throw, wrap it yourself. 
Otherwise, you can also use the stream error handling when mapper function throw. + +# Benchmark + +```bash +$ node bench.js +benchSplit*10000: 1484.983ms +benchBinarySplit*10000: 1484.080ms +benchSplit*10000: 1407.334ms +benchBinarySplit*10000: 1500.281ms +``` + +Benchmark taken on Node 8.11.3, on a Macbook i5 2018. + +# License + +Copyright (c) 2014-2018, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/reverse_engineering/node_modules/split2/bench.js b/reverse_engineering/node_modules/split2/bench.js new file mode 100644 index 0000000..7a3f9d5 --- /dev/null +++ b/reverse_engineering/node_modules/split2/bench.js @@ -0,0 +1,27 @@ +'use strict' + +var split = require('./') +var bench = require('fastbench') +var binarySplit = require('binary-split') +var fs = require('fs') + +function benchSplit (cb) { + fs.createReadStream('package.json') + .pipe(split()) + .on('end', cb) + .resume() +} + +function benchBinarySplit (cb) { + fs.createReadStream('package.json') + .pipe(binarySplit()) + .on('end', cb) + .resume() +} + +var run = bench([ + benchSplit, + benchBinarySplit +], 10000) + +run(run) diff --git a/reverse_engineering/node_modules/split2/index.js b/reverse_engineering/node_modules/split2/index.js new file mode 100644 index 0000000..fc2007b --- /dev/null +++ b/reverse_engineering/node_modules/split2/index.js @@ -0,0 +1,132 @@ +/* +Copyright (c) 2014-2018, Matteo Collina + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +*/ + +'use strict' + +const { Transform } = require('readable-stream') +const { StringDecoder } = require('string_decoder') +const kLast = Symbol('last') +const kDecoder = Symbol('decoder') + +function transform (chunk, enc, cb) { + var list + if (this.overflow) { // Line buffer is full. 
Skip to start of next line. + var buf = this[kDecoder].write(chunk) + list = buf.split(this.matcher) + + if (list.length === 1) return cb() // Line ending not found. Discard entire chunk. + + // Line ending found. Discard trailing fragment of previous line and reset overflow state. + list.shift() + this.overflow = false + } else { + this[kLast] += this[kDecoder].write(chunk) + list = this[kLast].split(this.matcher) + } + + this[kLast] = list.pop() + + for (var i = 0; i < list.length; i++) { + try { + push(this, this.mapper(list[i])) + } catch (error) { + return cb(error) + } + } + + this.overflow = this[kLast].length > this.maxLength + if (this.overflow && !this.skipOverflow) return cb(new Error('maximum buffer reached')) + + cb() +} + +function flush (cb) { + // forward any gibberish left in there + this[kLast] += this[kDecoder].end() + + if (this[kLast]) { + try { + push(this, this.mapper(this[kLast])) + } catch (error) { + return cb(error) + } + } + + cb() +} + +function push (self, val) { + if (val !== undefined) { + self.push(val) + } +} + +function noop (incoming) { + return incoming +} + +function split (matcher, mapper, options) { + // Set defaults for any arguments not supplied. + matcher = matcher || /\r?\n/ + mapper = mapper || noop + options = options || {} + + // Test arguments explicitly. + switch (arguments.length) { + case 1: + // If mapper is only argument. + if (typeof matcher === 'function') { + mapper = matcher + matcher = /\r?\n/ + // If options is only argument. + } else if (typeof matcher === 'object' && !(matcher instanceof RegExp)) { + options = matcher + matcher = /\r?\n/ + } + break + + case 2: + // If mapper and options are arguments. + if (typeof matcher === 'function') { + options = mapper + mapper = matcher + matcher = /\r?\n/ + // If matcher and options are arguments. 
+ } else if (typeof mapper === 'object') { + options = mapper + mapper = noop + } + } + + options = Object.assign({}, options) + options.transform = transform + options.flush = flush + options.readableObjectMode = true + + const stream = new Transform(options) + + stream[kLast] = '' + stream[kDecoder] = new StringDecoder('utf8') + stream.matcher = matcher + stream.mapper = mapper + stream.maxLength = options.maxLength + stream.skipOverflow = options.skipOverflow + stream.overflow = false + + return stream +} + +module.exports = split diff --git a/reverse_engineering/node_modules/split2/package.json b/reverse_engineering/node_modules/split2/package.json new file mode 100644 index 0000000..1f20e1d --- /dev/null +++ b/reverse_engineering/node_modules/split2/package.json @@ -0,0 +1,69 @@ +{ + "_from": "split2@^3.1.1", + "_id": "split2@3.2.2", + "_inBundle": false, + "_integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "_location": "/split2", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "split2@^3.1.1", + "name": "split2", + "escapedName": "split2", + "rawSpec": "^3.1.1", + "saveSpec": null, + "fetchSpec": "^3.1.1" + }, + "_requiredBy": [ + "/pgpass" + ], + "_resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", + "_shasum": "bf2cf2a37d838312c249c89206fd7a17dd12365f", + "_spec": "split2@^3.1.1", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pgpass", + "author": { + "name": "Matteo Collina", + "email": "hello@matteocollina.com" + }, + "bugs": { + "url": "http://github.com/mcollina/split2/issues" + }, + "bundleDependencies": false, + "dependencies": { + "readable-stream": "^3.0.0" + }, + "deprecated": false, + "description": "split a Text Stream into a Line Stream, using Stream 3", + "devDependencies": { + "binary-split": "^1.0.3", + "callback-stream": "^1.1.0", + "fastbench": "^1.0.0", + "nyc": "^15.0.1", + 
"pre-commit": "^1.1.2", + "safe-buffer": "^5.1.1", + "standard": "^14.0.0", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/mcollina/split2#readme", + "license": "ISC", + "main": "index.js", + "name": "split2", + "pre-commit": [ + "test" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/mcollina/split2.git" + }, + "scripts": { + "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js", + "legacy": "tape test.js", + "lint": "standard --verbose", + "test": "npm run lint && npm run unit", + "test:report": "npm run lint && npm run unit:report", + "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test.js" + }, + "version": "3.2.2", + "website": "https://github.com/mcollina/split2" +} diff --git a/reverse_engineering/node_modules/split2/test.js b/reverse_engineering/node_modules/split2/test.js new file mode 100644 index 0000000..e035787 --- /dev/null +++ b/reverse_engineering/node_modules/split2/test.js @@ -0,0 +1,392 @@ +'use strict' + +var test = require('tape') +var split = require('./') +var callback = require('callback-stream') +var Buffer = require('safe-buffer').Buffer +var strcb = callback.bind(null, { decodeStrings: false }) +var objcb = callback.bind(null, { objectMode: true }) + +test('split two lines on end', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\nworld') +}) + +test('split two lines on two writes', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.write('hello') + input.write('\nworld') + input.end() +}) + +test('split four lines on three writes', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world', 'bye', 
'world']) + })) + + input.write('hello\nwor') + input.write('ld\nbye\nwo') + input.write('rld') + input.end() +}) + +test('accumulate multiple writes', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['helloworld']) + })) + + input.write('hello') + input.write('world') + input.end() +}) + +test('split using a custom string matcher', function (t) { + t.plan(2) + + var input = split('~') + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('split using a custom regexp matcher', function (t) { + t.plan(2) + + var input = split(/~/) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('support an option argument', function (t) { + t.plan(2) + + var input = split({ highWaterMark: 2 }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\nworld') +}) + +test('support a mapper function', function (t) { + t.plan(2) + + var a = { a: '42' } + var b = { b: '24' } + + var input = split(JSON.parse) + + input.pipe(objcb(function (err, list) { + t.error(err) + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('\n') + input.end(JSON.stringify(b)) +}) + +test('split lines windows-style', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello\r\nworld') +}) + +test('splits a buffer', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end(Buffer.from('hello\nworld')) +}) + +test('do not end on undefined', function (t) { + t.plan(2) + + var input = split(function (line) { }) + + 
input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, []) + })) + + input.end(Buffer.from('hello\nworld')) +}) + +test('has destroy method', function (t) { + t.plan(1) + + var input = split(function (line) { }) + + input.on('close', function () { + t.ok(true, 'close emitted') + t.end() + }) + + input.destroy() +}) + +test('support custom matcher and mapper', function (t) { + t.plan(4) + + var a = { a: '42' } + var b = { b: '24' } + var input = split('~', JSON.parse) + + t.equal(input.matcher, '~') + t.equal(typeof input.mapper, 'function') + + input.pipe(objcb(function (err, list) { + t.notOk(err, 'no errors') + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('~') + input.end(JSON.stringify(b)) +}) + +test('support custom matcher and options', function (t) { + t.plan(6) + + var input = split('~', { highWaterMark: 1024 }) + + t.equal(input.matcher, '~') + t.equal(typeof input.mapper, 'function') + t.equal(input._readableState.highWaterMark, 1024) + t.equal(input._writableState.highWaterMark, 1024) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.end('hello~world') +}) + +test('support mapper and options', function (t) { + t.plan(6) + + var a = { a: '42' } + var b = { b: '24' } + var input = split(JSON.parse, { highWaterMark: 1024 }) + + t.ok(input.matcher instanceof RegExp, 'matcher is RegExp') + t.equal(typeof input.mapper, 'function') + t.equal(input._readableState.highWaterMark, 1024) + t.equal(input._writableState.highWaterMark, 1024) + + input.pipe(objcb(function (err, list) { + t.error(err) + t.deepEqual(list, [a, b]) + })) + + input.write(JSON.stringify(a)) + input.write('\n') + input.end(JSON.stringify(b)) +}) + +test('split utf8 chars', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫烫烫', '锟斤拷']) + })) + + var buf = Buffer.from('烫烫烫\r\n锟斤拷', 'utf8') + for 
(var i = 0; i < buf.length; ++i) { + input.write(buf.slice(i, i + 1)) + } + input.end() +}) + +test('split utf8 chars 2by2', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫烫烫', '烫烫烫']) + })) + + var str = '烫烫烫\r\n烫烫烫' + var buf = Buffer.from(str, 'utf8') + for (var i = 0; i < buf.length; i += 2) { + input.write(buf.slice(i, i + 2)) + } + input.end() +}) + +test('split lines when the \n comes at the end of a chunk', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['hello', 'world']) + })) + + input.write('hello\n') + input.end('world') +}) + +test('truncated utf-8 char', function (t) { + t.plan(2) + + var input = split() + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['烫' + Buffer.from('e7', 'hex').toString()]) + })) + + var str = '烫烫' + var buf = Buffer.from(str, 'utf8') + + input.write(buf.slice(0, 3)) + input.end(buf.slice(3, 4)) +}) + +test('maximum buffer limit', function (t) { + t.plan(1) + + var input = split({ maxLength: 2 }) + + input.pipe(strcb(function (err, list) { + t.ok(err) + })) + + input.write('hey') +}) + +test('readable highWaterMark', function (t) { + var input = split() + t.equal(input._readableState.highWaterMark, 16) + t.end() +}) + +test('maxLength < chunk size', function (t) { + t.plan(2) + + var input = split({ maxLength: 2 }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['a', 'b']) + })) + + input.end('a\nb') +}) + +test('maximum buffer limit w/skip', function (t) { + t.plan(2) + + var input = split({ maxLength: 2, skipOverflow: true }) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.deepEqual(list, ['a', 'b', 'c']) + })) + + input.write('a\n123') + input.write('456') + input.write('789\nb\nc') + input.end() +}) + +test("don't modify the options object", function (t) { + t.plan(2) + + var 
options = {} + var input = split(options) + + input.pipe(strcb(function (err, list) { + t.error(err) + t.same(options, {}) + })) + + input.end() +}) + +test('mapper throws flush', function (t) { + t.plan(1) + var error = new Error() + var input = split(function () { + throw error + }) + + input.on('error', (err, list) => { + t.same(err, error) + }) + input.end('hello') +}) + +test('mapper throws on transform', function (t) { + t.plan(2) + + var error = new Error() + var input = split(function (l) { + throw error + }) + + input.on('error', (err) => { + t.same(err, error) + }) + input.write('a') + input.write('\n') + input.end('b') +}) diff --git a/reverse_engineering/node_modules/ssh2-streams/.travis.yml b/reverse_engineering/node_modules/ssh2-streams/.travis.yml new file mode 100644 index 0000000..4cec49d --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/.travis.yml @@ -0,0 +1,17 @@ +sudo: false +language: cpp +notifications: + email: false +env: + matrix: + - TRAVIS_NODE_VERSION="0.10" + - TRAVIS_NODE_VERSION="0.12" + - TRAVIS_NODE_VERSION="4" + - TRAVIS_NODE_VERSION="6" + - TRAVIS_NODE_VERSION="7" +install: + - rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION + - node --version + - npm --version + - npm install +script: npm test diff --git a/reverse_engineering/node_modules/ssh2-streams/LICENSE b/reverse_engineering/node_modules/ssh2-streams/LICENSE new file mode 100644 index 0000000..3d983a8 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/LICENSE @@ -0,0 +1,19 @@ +Copyright 2014 Brian White. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/reverse_engineering/node_modules/ssh2-streams/README.md b/reverse_engineering/node_modules/ssh2-streams/README.md new file mode 100644 index 0000000..01e8823 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/README.md @@ -0,0 +1,569 @@ +Description +=========== + +SSH2 and SFTP(v3) client/server protocol streams for [node.js](http://nodejs.org/). + +[![Build Status](https://travis-ci.org/mscdex/ssh2-streams.svg?branch=master)](https://travis-ci.org/mscdex/ssh2-streams) + + +Requirements +============ + +* [node.js](http://nodejs.org/) -- v0.10 or newer + + +Install +======= + + npm install ssh2-streams + + +API +=== + +`require('ssh2-streams').SSH2Stream` returns an **_SSH2Stream_** constructor. + +`require('ssh2-streams').SFTPStream` returns an [**_SFTPStream_**](SFTPStream.md) constructor. + +`require('ssh2-streams').utils` returns an _object_ of useful utility functions. 
+ +`require('ssh2-streams').constants` returns an _object_ containing useful SSH protocol constants. + + +SSH2Stream events +----------------- + +**Client/Server events** + +* **header**(< _object_ >headerInfo) - Emitted when the protocol header is seen. `headerInfo` contains: + + * **greeting** - _string_ - (Client-only) An optional greeting message presented by the server. + + * **identRaw** - _string_ - The raw identification string sent by the remote party. + + * **versions** - _object_ - Contains various information parsed from `identRaw`: + + * **protocol** - _string_ - The protocol version (always `1.99` or `2.0`) supported by the remote party. + + * **software** - _string_ - The software name used by the remote party. + + * **comments** - _string_ - Any additional text that comes after the software name. + +* **GLOBAL_REQUEST**(< _string_ >reqName, < _boolean_ >wantReply, < _mixed_ >reqData) + +* **CHANNEL_DATA:\**(< _Buffer_ >data) + +* **CHANNEL_EXTENDED_DATA:\**(< _integer_ >type, < _Buffer_ >data) + +* **CHANNEL_WINDOW_ADJUST:\**(< _integer_ >bytesToAdd) + +* **CHANNEL_SUCCESS:\**() + +* **CHANNEL_FAILURE:\**() + +* **CHANNEL_EOF:\**() + +* **CHANNEL_CLOSE:\**() + +* **CHANNEL_OPEN_CONFIRMATION:\**(< _object_ >channelInfo) - `channelInfo` contains: + + * **recipient** - _integer_ - The local channel number. + + * **sender** - _integer_ - The remote party's channel number. + + * **window** - _integer_ - The initial window size for the channel. + + * **packetSize** - _integer_ - The maximum packet size for the channel. + +* **CHANNEL_OPEN_FAILURE:\**(< _object_ >failInfo) - `failInfo` contains: + + * **recipient** - _integer_ - The local channel number. + + * **reasonCode** - _integer_ - The reason code of the failure. + + * **reason** - _string_ - A text representation of the `reasonCode`. + + * **description** - _string_ - An optional description of the failure. 
+ +* **DISCONNECT**(< _string_ >reason, < _integer_ >reasonCode, < _string_ >description) + +* **DEBUG**(< _string_ >message) + +* **NEWKEYS**() + +* **REQUEST_SUCCESS**([< _Buffer_ >resData]) + +* **REQUEST_FAILURE**() + + + +**Client-only events** + +* **fingerprint**(< _Buffer_ >hostKey, < _function_ >callback) - This event allows you to verify a host's key. If `callback` is called with `true`, the handshake continues. Otherwise a disconnection will occur if `callback` is called with `false`. The default behavior is to auto-allow any host key if there are no handlers for this event. + +* **SERVICE_ACCEPT**(< _string_ >serviceName) + +* **USERAUTH_PASSWD_CHANGEREQ**(< _string_ >message) + +* **USERAUTH_INFO_REQUEST**(< _string_ >name, < _string_ >instructions, < _string_ >lang, < _array_ >prompts) + +* **USERAUTH_PK_OK**() + +* **USERAUTH_SUCCESS**() + +* **USERAUTH_FAILURE**(< _array_ >methodsContinue, < _boolean_ >partialSuccess) + +* **USERAUTH_BANNER**(< _string_ >message) + +* **CHANNEL_OPEN**(< _object_ >channelInfo) - `channelInfo` contains: + + * **type** - _string_ - The channel type (e.g. `x11`, `forwarded-tcpip`). + + * **sender** - _integer_ - The remote party's channel number. + + * **window** - _integer_ - The initial window size for the channel. + + * **packetSize** - _integer_ - The maximum packet size for the channel. + + * **data** - _object_ - The properties available depend on `type`: + + * `x11`: + + * **srcIP** - _string_ - Source IP address of X11 connection request. + + * **srcPort** - _string_ - Source port of X11 connection request. + + * `forwarded-tcpip`: + + * **srcIP** - _string_ - Source IP address of incoming connection. + + * **srcPort** - _string_ - Source port of incoming connection. + + * **destIP** - _string_ - Destination IP address of incoming connection. + + * **destPort** - _string_ - Destination port of incoming connection. 
+ + * `forwarded-streamlocal@openssh.com`: + + * **socketPath** - _string_ - Source socket path of incoming connection. + + * `auth-agent@openssh.com` has no extra data. + +* **CHANNEL_REQUEST:\**(< _object_ >reqInfo) - `reqInfo` properties depend on `reqInfo.request`: + + * `exit-status`: + + * **code** - _integer_ - The exit status code of the remote process. + + * `exit-signal`: + + * **signal** - _string_ - The signal name. + + * **coredump** - _boolean_ - Was the exit the result of a core dump? + + * **description** - _string_ - An optional error message. + + + +**Server-only events** + +* **SERVICE_REQUEST**(< _string_ >serviceName) + +* **USERAUTH_REQUEST**(< _string_ >username, < _string_ >serviceName, < _string_ >authMethod, < _mixed_ >authMethodData) - `authMethodData` depends on `authMethod`: + + * For `password`, it's a _string_ containing the password. + + * For `publickey`, it's an _object_ containing: + + * **keyAlgo** - _string_ - The public key algorithm. + + * **key** - _Buffer_ - The public key data. + + * **signature** - _mixed_ - If set, it is a _Buffer_ containing the signature to be verified. + + * **blob** - _mixed_ - If set, it is a _Buffer_ containing the data to sign. The resulting signature is what is compared to `signature`. + + * For `hostbased`, it's an _object_ including the properties from `publickey` but also: + + * **localHostname** - _string_ - The client's hostname to be verified. + + * **localUsername** - _string_ - The client's (local) username to be verified. + +* **USERAUTH_INFO_RESPONSE**(< _array_ >responses) + +* **GLOBAL_REQUEST**(< _string_ >reqName, < _boolean_ >wantReply, < _mixed_ >reqData) - `reqData` depends on `reqName`: + + * For `tcpip-forward`/`cancel-tcpip-forward`, it's an _object_ containing: + + * **bindAddr** - _string_ - The IP address to start/stop binding to. + + * **bindPort** - _string_ - The port to start/stop binding to. 
+ + * For `streamlocal-forward@openssh.com`/`cancel-streamlocal-forward@openssh.com`, it's an _object_ containing: + + * **socketPath** - _string_ - The socket path to start/stop listening on. + + * For `no-more-sessions@openssh.com`, there is no `reqData`. + + * For any other requests, it's a _Buffer_ containing raw request-specific data *if* there is any extra data. + +* **CHANNEL_OPEN**(< _object_ >channelInfo) - `channelInfo` contains: + + * **type** - _string_ - The channel type (e.g. `session`, `direct-tcpip`). + + * **sender** - _integer_ - The remote party's channel number. + + * **window** - _integer_ - The initial window size for the channel. + + * **packetSize** - _integer_ - The maximum packet size for the channel. + + * **data** - _object_ - The properties available depend on `type`: + + * `direct-tcpip`: + + * **srcIP** - _string_ - Source IP address of outgoing connection. + + * **srcPort** - _string_ - Source port of outgoing connection. + + * **destIP** - _string_ - Destination IP address of outgoing connection. + + * **destPort** - _string_ - Destination port of outgoing connection. + + * `direct-streamlocal@openssh.com`: + + * **socketPath** - _string_ - Destination socket path of outgoing connection. + + * `session` has no extra data. + +* **CHANNEL_REQUEST:\**(< _object_ >reqInfo) - `reqInfo` properties depend on `reqInfo.request`: + + * `pty-req`: + + * **wantReply** - _boolean_ - The client is requesting a response to this request. + + * **term** - _string_ - The terminal type name. + + * **cols** - _integer_ - The number of columns. + + * **rows** - _integer_ - The number of rows. + + * **width** - _integer_ - The width in pixels. + + * **height** - _integer_ - The height in pixels. + + * **modes** - _object_ - The terminal modes. + + * `window-change`: + + * **cols** - _integer_ - The number of columns. + + * **rows** - _integer_ - The number of rows. + + * **width** - _integer_ - The width in pixels. 
+ + * **height** - _integer_ - The height in pixels. + + * `x11-req`: + + * **wantReply** - _boolean_ - The client is requesting a response to this request. + + * **single** - _boolean_ - Whether only a single X11 connection should be allowed. + + * **protocol** - _string_ - The X11 authentication protocol to be used. + + * **cookie** - _string_ - The hex-encoded X11 authentication cookie. + + * **screen** - _integer_ - The screen number for incoming X11 connections. + + * `env`: + + * **wantReply** - _boolean_ - The client is requesting a response to this request. + + * **key** - _string_ - The environment variable name. + + * **val** - _string_ - The environment variable value. + + * `shell`: + + * **wantReply** - _boolean_ - The client is requesting a response to this request. + + * `exec`: + + * **wantReply** - _boolean_ - The client is requesting a response to this request. + + * **command** - _string_ - The command to be executed. + + * `subsystem`: + + * **wantReply** - _boolean_ - The client is requesting a response to this request. + + * **subsystem** - _string_ - The name of the subsystem. + + * `signal`: + + * **signal** - _string_ - The signal name (prefixed with `SIG`). + + * `xon-xoff`: + + * **clientControl** - _boolean_ - Client can/can't perform flow control (control-S/control-Q processing). + + * `auth-agent-req@openssh.com` has no `reqInfo`. + +SSH2Stream properties +--------------------- + +* **bytesSent** - _integer_ - The number of bytes sent since the last keying. This metric can be useful in determining when to call `rekey()`. + +* **bytesReceived** - _integer_ - The number of bytes received since the last keying. This metric can be useful in determining when to call `rekey()`. + + +SSH2Stream methods +------------------ + +* **(constructor)**(< _object_ >config) - Creates and returns a new SSH2Stream instance. SSH2Stream instances are Duplex streams. 
`config` can contain: + + * **server** - _boolean_ - Set to `true` to create an instance in server mode. **Default:** `false` + + * **hostKeys** - _object_ - If in server mode, an object keyed on host key format (see supported `serverHostKey` values in `algorithms` option below) with values being (decrypted) _Buffer_s or _string_s that contain PEM-encoded (OpenSSH format) host private key(s). **Default:** (none) + + * **greeting** - _string_ - If in server mode, an optional message to send to the user immediately upon connection, before the handshake. **Note:** Most clients usually ignore this. **Default:** (none) + + * **banner** - _string_ - If in server mode, an optional message to send to the user once, right before authentication begins. **Default:** (none) + + * **ident** - _string_ - A custom software name/version identifier. **Default:** `'ssh2js' + moduleVersion + 'srv'` (server mode) `'ssh2js' + moduleVersion` (client mode) + + * **maxPacketSize** - _string_ - This is the maximum packet size that will be accepted. It should be 35000 bytes or larger to be compatible with other SSH2 implementations. **Default:** `35000` + + * **highWaterMark** - _integer_ - This is the `highWaterMark` to use for the stream. **Default:** `32 * 1024` + + * **algorithms** - _object_ - This option allows you to explicitly override the default transport layer algorithms used for the connection. Each value must be an array of valid algorithms for that category. The order of the algorithms in the arrays are important, with the most favorable being first. Valid keys: + + * **kex** - _array_ - Key exchange algorithms. + + * Default values: + + 1. ecdh-sha2-nistp256 **(node v0.11.14 or newer)** + 2. ecdh-sha2-nistp384 **(node v0.11.14 or newer)** + 3. ecdh-sha2-nistp521 **(node v0.11.14 or newer)** + 4. diffie-hellman-group-exchange-sha256 **(node v0.11.12 or newer)** + 5. 
diffie-hellman-group14-sha1 + + * Supported values: + + * ecdh-sha2-nistp256 **(node v0.11.14 or newer)** + * ecdh-sha2-nistp384 **(node v0.11.14 or newer)** + * ecdh-sha2-nistp521 **(node v0.11.14 or newer)** + * diffie-hellman-group-exchange-sha256 **(node v0.11.12 or newer)** + * diffie-hellman-group14-sha1 + * diffie-hellman-group-exchange-sha1 **(node v0.11.12 or newer)** + * diffie-hellman-group1-sha1 + + * **cipher** - _array_ - Ciphers. + + * Default values: + + 1. aes128-ctr + 2. aes192-ctr + 3. aes256-ctr + 4. aes128-gcm **(node v0.11.12 or newer)** + 5. aes128-gcm@openssh.com **(node v0.11.12 or newer)** + 6. aes256-gcm **(node v0.11.12 or newer)** + 7. aes256-gcm@openssh.com **(node v0.11.12 or newer)** + + * Supported values: + + * aes128-ctr + * aes192-ctr + * aes256-ctr + * aes128-gcm **(node v0.11.12 or newer)** + * aes128-gcm@openssh.com **(node v0.11.12 or newer)** + * aes256-gcm **(node v0.11.12 or newer)** + * aes256-gcm@openssh.com **(node v0.11.12 or newer)** + * aes256-cbc + * aes192-cbc + * aes128-cbc + * blowfish-cbc + * 3des-cbc + * arcfour256 + * arcfour128 + * cast128-cbc + * arcfour + + * **serverHostKey** - _array_ - Server host key formats. In server mode, this list must agree with the host private keys set in the `hostKeys` config setting. + + * Default values: + + 1. ssh-rsa + 2. ecdsa-sha2-nistp256 **(node v5.2.0 or newer)** + 3. ecdsa-sha2-nistp384 **(node v5.2.0 or newer)** + 4. ecdsa-sha2-nistp521 **(node v5.2.0 or newer)** + + * Supported values: + + * ssh-rsa + * ecdsa-sha2-nistp256 **(node v5.2.0 or newer)** + * ecdsa-sha2-nistp384 **(node v5.2.0 or newer)** + * ecdsa-sha2-nistp521 **(node v5.2.0 or newer)** + * ssh-dss + + * **hmac** - _array_ - (H)MAC algorithms. + + * Default values: + + 1. hmac-sha2-256 + 2. hmac-sha2-512 + 3. 
hmac-sha1 + + * Supported values: + + * hmac-sha2-256 + * hmac-sha2-512 + * hmac-sha1 + * hmac-md5 + * hmac-sha2-256-96 + * hmac-sha2-512-96 + * hmac-ripemd160 + * hmac-sha1-96 + * hmac-md5-96 + + * **compress** - _array_ - Compression algorithms. + + * Default values: + + 1. none + 2. zlib@openssh.com + 3. zlib + + * Supported values: + + * none + * zlib@openssh.com + * zlib + + * **debug** - _function_ - Set this to a function that receives a single string argument to get detailed (local) debug information. **Default:** (none) + + + +**Client/Server methods** + +* **ping**() - _boolean_ - Writes a dummy GLOBAL_REQUEST packet (specifically "keepalive@openssh.com") that requests a reply. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **disconnect**([< _integer_ >reasonCode]) - _boolean_ - Writes a disconnect packet and closes the stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **rekey**() - _boolean_ - Starts the re-keying process. Incoming/Outgoing packets are buffered until the re-keying process has finished. Returns `false` to indicate that no more packets should be written until the `NEWKEYS` event is seen. + +* **requestSuccess**([< _Buffer_ >data]) - _boolean_ - Writes a request success packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **requestFailure**() - _boolean_ - Writes a request failure packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelSuccess**() - _boolean_ - Writes a channel success packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelFailure**() - _boolean_ - Writes a channel failure packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. 
+ +* **channelEOF**(< _integer_ >channel) - _boolean_ - Writes a channel EOF packet for the given `channel`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelClose**(< _integer_ >channel) - _boolean_ - Writes a channel close packet for the given `channel`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelWindowAdjust**(< _integer_ >channel, < _integer_ >amount) - _boolean_ - Writes a channel window adjust packet for the given `channel` where `amount` is the number of bytes to add to the channel window. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelData**(< _integer_ >channel, < _mixed_ >data) - _boolean_ - Writes a channel data packet for the given `channel` where `data` is a _Buffer_ or _string_. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelExtData**(< _integer_ >channel, < _mixed_ >data, < _integer_ >type) - _boolean_ - Writes a channel extended data packet for the given `channel` where `data` is a _Buffer_ or _string_. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelOpenConfirm**(< _integer_ >remoteChannel, < _integer_ >localChannel, < _integer_ >initWindow, < _integer_ >maxPacket) - _boolean_ - Writes a channel open confirmation packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **channelOpenFail**(< _integer_ >remoteChannel, < _integer_ >reasonCode[, < _string_ >description]) - _boolean_ - Writes a channel open failure packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + + +**Client-only methods** + +* **service**(< _string_ >serviceName) - _boolean_ - Writes a service request packet for `serviceName`. 
Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **tcpipForward**(< _string_ >bindAddr, < _integer_ >bindPort[, < _boolean_ >wantReply]) - _boolean_ - Writes a tcpip forward global request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **cancelTcpipForward**(< _string_ >bindAddr, < _integer_ >bindPort[, < _boolean_ >wantReply]) - _boolean_ - Writes a cancel tcpip forward global request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authPassword**(< _string_ >username, < _string_ >password) - _boolean_ - Writes a password userauth request packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authPK**(< _string_ >username, < _object_ >pubKey[, < _function_ >cbSign]) - _boolean_ - Writes a publickey userauth request packet. `pubKey` is the object returned from using `utils.parseKey()` on a private or public key. If `cbSign` is not present, a pubkey check userauth packet is written. Otherwise `cbSign` is called with `(blob, callback)`, where `blob` is the data to sign with the private key and the resulting signature _Buffer_ is passed to `callback` as the first argument. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authHostbased**(< _string_ >username, < _object_ >pubKey, < _string_ >localHostname, < _string_ >localUsername, < _function_ >cbSign) - _boolean_ - Writes a hostbased userauth request packet. `pubKey` is the object returned from using `utils.parseKey()` on a private or public key. `cbSign` is called with `(blob, callback)`, where `blob` is the data to sign with the private key and the resulting signature _Buffer_ is passed to `callback` as the first argument. 
Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authKeyboard**(< _string_ >username) - _boolean_ - Writes a keyboard-interactive userauth request packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authNone**(< _string_ >username) - _boolean_ - Writes a "none" userauth request packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authInfoRes**(< _array_ >responses) - _boolean_ - Writes a userauth info response packet. `responses` is an _array_ of zero or more strings corresponding to responses to prompts previously sent by the server. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **directTcpip**(< _integer_ >channel, < _integer_ >initWindow, < _integer_ >maxPacket, < _object_ >config) - _boolean_ - Writes a direct tcpip channel open packet. `config` must contain `srcIP`, `srcPort`, `dstIP`, and `dstPort`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **session**(< _integer_ >channel, < _integer_ >initWindow, < _integer_ >maxPacket) - _boolean_ - Writes a session channel open packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_agentForward**(< _integer_ >channel[, < _boolean_ >wantReply]) - _boolean_ - Writes an `auth-agent-req@openssh.com` channel request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **windowChange**(< _integer_ >channel, < _integer_ >rows, < _integer_ >cols, < _integer_ >height, < _integer_ >width) - _boolean_ - Writes a window change channel request packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. 
+ +* **pty**(< _integer_ >channel, < _integer_ >rows, < _integer_ >cols, < _integer_ >height, < _integer_ >width, < _string_ >terminalType, < _mixed_ >terminalModes[, < _boolean_ >wantReply]) - _boolean_ - Writes a pty channel request packet. If `terminalType` is falsey, `vt100` is used. `terminalModes` can be the raw bytes, an _object_ of the terminal modes to set, or a falsey value for no modes. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **env**(< _integer_ >channel, < _string_ >key, < _mixed_ >value[, < _boolean_ >wantReply]) - _boolean_ - Writes an env channel request packet. `value` can be a _string_ or _Buffer_. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **shell**(< _integer_ >channel[, < _boolean_ >wantReply]) - _boolean_ - Writes a shell channel request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **exec**(< _integer_ >channel, < _string_ >command[, < _boolean_ >wantReply]) - _boolean_ - Writes an exec channel request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **signal**(< _integer_ >channel, < _string_ >signalName) - _boolean_ - Writes a signal channel request packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **x11Forward**(< _integer_ >channel, < _object_ >config[, < _boolean_ >wantReply]) - _boolean_ - Writes an X11 forward channel request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `config` can contain: + + * **single** - _boolean_ - `true` if only a single connection should be forwarded. 
+ + * **protocol** - _string_ - The name of the X11 authentication method used (e.g. `MIT-MAGIC-COOKIE-1`). + + * **cookie** - _string_ - The X11 authentication cookie encoded in hexadecimal. + + * **screen** - _integer_ - The screen number to forward X11 connections for. + +* **subsystem**(< _integer_ >channel, < _string_ >name[, < _boolean_ >wantReply]) - _boolean_ - Writes a subsystem channel request packet. `name` is the name of the subsystem (e.g. `sftp` or `netconf`). `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_noMoreSessions**([< _boolean_ >wantReply]) - _boolean_ - Writes a no-more-sessions@openssh.com request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_streamLocalForward**(< _string_ >socketPath[, < _boolean_ >wantReply]) - _boolean_ - Writes a streamlocal-forward@openssh.com request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_cancelStreamLocalForward**(< _string_ >socketPath[, < _boolean_ >wantReply]) - _boolean_ - Writes a cancel-streamlocal-forward@openssh.com request packet. `wantReply` defaults to `true`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_directStreamLocal**(< _integer_ >channel, < _integer_ >initWindow, < _integer_ >maxPacket, < _object_ >config) - _boolean_ - Writes a direct-streamlocal@openssh.com channel open packet. `config` must contain `socketPath`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + + +**Server-only methods** + +* **serviceAccept**(< _string_ >serviceName) - _boolean_ - Writes a service accept packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. 
+ +* **authFailure**([< _array_ >authMethods[, < _boolean_ >partialSuccess]]) - _boolean_ - Writes a userauth failure packet. `authMethods` is an _array_ of authentication methods that can continue. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authSuccess**() - _boolean_ - Writes a userauth success packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authPKOK**(< _string_ >keyAlgorithm, < _Buffer_ >keyData) - _boolean_ - Writes a userauth PK OK packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **authInfoReq**(< _string_ >name, < _string_ >instructions, < _array_ >prompts) - _boolean_ - Writes a userauth info request packet. `prompts` is an array of `{ prompt: 'Prompt text', echo: true }` objects (`prompt` being the prompt text and `echo` indicating whether the client's response to the prompt should be echoed to their display). Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **forwardedTcpip**(< _integer_ >channel, < _integer_ >initWindow, < _integer_ >maxPacket, < _object_ >info) - _boolean_ - Writes a forwarded tcpip channel open packet. `info` must contain `boundAddr`, `boundPort`, `remoteAddr`, and `remotePort`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **x11**(< _integer_ >channel, < _integer_ >initWindow, < _integer_ >maxPacket, < _object_ >info) - _boolean_ - Writes an X11 channel open packet. `info` must contain `originAddr` and `originPort`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_forwardedStreamLocal**(< _integer_ >channel, < _integer_ >initWindow, < _integer_ >maxPacket, < _object_ >info) - _boolean_ - Writes a forwarded-streamlocal@openssh.com channel open packet. `info` must contain `socketPath`. 
Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **exitStatus**(< _integer_ >channel, < _integer_ >exitCode) - _boolean_ - Writes an exit status channel request packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **exitSignal**(< _integer_ >channel, < _string_ >signalName, < _boolean_ >coreDumped, < _string_ >errorMessage) - _boolean_ - Writes an exit signal channel request packet. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + +Utility methods +--------------- + +* **parseKey**(< _mixed_ >keyData) - _object_ - Parses a private/public key in OpenSSH and RFC4716 formats. + +* **decryptKey**(< _object_ >privKeyInfo, < _string_ >passphrase) - _(void)_ - Takes a private key parsed with `parseKey()` and decrypts it with `passphrase`. The decrypted key data overwrites the original encrypted copy. + +* **genPublicKey**(< _object_ >privKeyInfo) - _object_ - Takes a private key parsed with `parseKey()` and generates the associated public key and returns the public key information in the same format as `parseKey()`. diff --git a/reverse_engineering/node_modules/ssh2-streams/SFTPStream.md b/reverse_engineering/node_modules/ssh2-streams/SFTPStream.md new file mode 100644 index 0000000..f44e5bd --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/SFTPStream.md @@ -0,0 +1,405 @@ +SFTPStream events +----------------- + +**Client/Server events** + +* **ready**() - Emitted after initial protocol version check has passed. + +**Server-only events** + +_Responses to these client requests are sent using one of the methods listed further in this document under `Server-only methods`. 
The valid response(s) for each request are documented below._ + +* **OPEN**(< _integer_ >reqID, < _string_ >filename, < _integer_ >flags, < _ATTRS_ >attrs) + + `flags` is a bitfield containing any of the flags defined in + `SFTPStream.OPEN_MODE`. Use the static method `SFTPStream.flagsToString()` + to convert the value to a mode string to be used by `fs.open()` (e.g. `'r'`). + + Respond using one of the following: + + * `handle()` - This indicates a successful opening of the file and passes + the given handle back to the client to use to refer to this open file for + future operations (e.g. reading, writing, closing). + + * `status()` - Use this to indicate a failure to open the requested file. + +* **READ**(< _integer_ >reqID, < _Buffer_ >handle, < _integer_ >offset, < _integer_ >length) + + Respond using one of the following: + + * `data()` - Use this to send the requested chunk of data back to the client. + The amount of data sent is allowed to be less than the `length` requested, + for example if the file ends between `offset` and `offset + length`. + + * `status()` - Use this to indicate either end of file (`STATUS_CODE.EOF`) + has been reached (`offset` is past the end of the file) or if an error + occurred while reading the requested part of the file. + +* **WRITE**(< _integer_ >reqID, < _Buffer_ >handle, < _integer_ >offset, < _Buffer_ >data) + + Respond using: + + * `status()` - Use this to indicate success/failure of the write to the file. + +* **FSTAT**(< _integer_ >reqID, < _Buffer_ >handle) + + Respond using one of the following: + + * `attrs()` - Use this to send the attributes for the requested + file/directory back to the client. + + * `status()` - Use this to indicate an error occurred while accessing the + file/directory. + +* **FSETSTAT**(< _integer_ >reqID, < _Buffer_ >handle, < _ATTRS_ >attrs) + + Respond using: + + * `status()` - Use this to indicate success/failure of the setting of the + given file/directory attributes. 
+ +* **CLOSE**(< _integer_ >reqID, < _Buffer_ >handle) + + Respond using: + + * `status()` - Use this to indicate success (`STATUS_CODE.OK`) or failure of + the closing of the file identified by `handle`. + +* **OPENDIR**(< _integer_ >reqID, < _string_ >path) + + Respond using one of the following: + + * `handle()` - This indicates a successful opening of the directory and + passes the given handle back to the client to use to refer to this open + directory for future operations (e.g. reading directory contents, closing). + + * `status()` - Use this to indicate a failure to open the requested + directory. + +* **READDIR**(< _integer_ >reqID, < _Buffer_ >handle) + + Respond using one of the following: + + * `name()` - Use this to send one or more directory listings for the open + directory back to the client. + + * `status()` - Use this to indicate either end of directory contents + (`STATUS_CODE.EOF`) or if an error occurred while reading the directory + contents. + +* **LSTAT**(< _integer_ >reqID, < _string_ >path) + + Respond using one of the following: + + * `attrs()` - Use this to send the attributes for the requested + file/directory back to the client. + + * `status()` - Use this to indicate an error occurred while accessing the + file/directory. + +* **STAT**(< _integer_ >reqID, < _string_ >path) + + Respond using one of the following: + + * `attrs()` - Use this to send the attributes for the requested + file/directory back to the client. + + * `status()` - Use this to indicate an error occurred while accessing the + file/directory. + +* **REMOVE**(< _integer_ >reqID, < _string_ >path) + + Respond using: + + * `status()` - Use this to indicate success/failure of the removal of the + file at `path`. + +* **RMDIR**(< _integer_ >reqID, < _string_ >path) + + Respond using: + + * `status()` - Use this to indicate success/failure of the removal of the + directory at `path`. 
+ +* **REALPATH**(< _integer_ >reqID, < _string_ >path) + + Respond using one of the following: + + * `name()` - Use this to respond with a normalized version of `path`. + No file/directory attributes are required to be sent in this response. + + * `status()` - Use this to indicate a failure in normalizing `path`. + +* **READLINK**(< _integer_ >reqID, < _string_ >path) + + Respond using one of the following: + + * `name()` - Use this to respond with the target of the symlink at `path`. + No file/directory attributes are required to be sent in this response. + + * `status()` - Use this to indicate a failure in reading the symlink at + `path`. + +* **SETSTAT**(< _integer_ >reqID, < _string_ >path, < _ATTRS_ >attrs) + + Respond using: + + * `status()` - Use this to indicate success/failure of the setting of the + given file/directory attributes. + +* **MKDIR**(< _integer_ >reqID, < _string_ >path, < _ATTRS_ >attrs) + + Respond using: + + * `status()` - Use this to indicate success/failure of the creation of the + directory at `path`. + +* **RENAME**(< _integer_ >reqID, < _string_ >oldPath, < _string_ >newPath) + + Respond using: + + * `status()` - Use this to indicate success/failure of the renaming of the + file/directory at `oldPath` to `newPath`. + +* **SYMLINK**(< _integer_ >reqID, < _string_ >linkPath, < _string_ >targetPath) + + Respond using: + + * `status()` - Use this to indicate success/failure of the symlink creation. 
+ + +SFTPStream static constants +--------------------------- + +* **SFTPStream.STATUS_CODE** - _object_ - Contains the various status codes (for use especially with `status()`): + + * `OK` + + * `EOF` + + * `NO_SUCH_FILE` + + * `PERMISSION_DENIED` + + * `FAILURE` + + * `BAD_MESSAGE` + + * `OP_UNSUPPORTED` + +* **SFTPStream.OPEN_MODE** - _object_ - Contains the various open file flags: + + * `READ` + + * `WRITE` + + * `APPEND` + + * `CREAT` + + * `TRUNC` + + * `EXCL` + + +SFTPStream static methods +------------------------- + +* **SFTPStream.stringToFlags**(< _string_ >flagsStr) - _integer_ - Converts string flags (e.g. `'r'`, `'a+'`, etc.) to the appropriate `SFTPStream.OPEN_MODE` flag mask. Returns `null` if conversion failed. + +* **SFTPStream.flagsToString**(< _integer_ >flagsMask) - _string_ - Converts flag mask (e.g. number containing `SFTPStream.OPEN_MODE` values) to the appropriate string value. Returns `null` if conversion failed. + + +SFTPStream methods +------------------ + +* **(constructor)**(< _object_ >config[, < _string_ >remoteIdentRaw]) - Creates and returns a new SFTPStream instance. SFTPStream instances are Duplex streams. `remoteIdentRaw` can be the raw SSH identification string of the remote party. This is used to change internal behavior based on particular SFTP implementations. `config` can contain: + + * **server** - _boolean_ - Set to `true` to create an instance in server mode. **Default:** `false` + + * **highWaterMark** - _integer_ - This is the `highWaterMark` to use for the stream. **Default:** `32 * 1024` + + * **debug** - _function_ - Set this to a function that receives a single string argument to get detailed (local) debug information. **Default:** (none) + + + +**Client-only methods** + +* **fastGet**(< _string_ >remotePath, < _string_ >localPath[, < _object_ >options], < _function_ >callback) - _(void)_ - Downloads a file at `remotePath` to `localPath` using parallel reads for faster throughput. 
`options` can have the following properties: + + * **concurrency** - _integer_ - Number of concurrent reads **Default:** `64` + + * **chunkSize** - _integer_ - Size of each read in bytes **Default:** `32768` + + * **step** - _function_(< _integer_ >total_transferred, < _integer_ >chunk, < _integer_ >total) - Called every time a part of a file was transferred + + `callback` has 1 parameter: < _Error_ >err. + +* **fastPut**(< _string_ >localPath, < _string_ >remotePath[, < _object_ >options], < _function_ >callback) - _(void)_ - Uploads a file from `localPath` to `remotePath` using parallel reads for faster throughput. `options` can have the following properties: + + * **concurrency** - _integer_ - Number of concurrent reads **Default:** `64` + + * **chunkSize** - _integer_ - Size of each read in bytes **Default:** `32768` + + * **step** - _function_(< _integer_ >total_transferred, < _integer_ >chunk, < _integer_ >total) - Called every time a part of a file was transferred + + * **mode** - _mixed_ - Integer or string representing the file mode to set for the uploaded file. + + `callback` has 1 parameter: < _Error_ >err. + +* **createReadStream**(< _string_ >path[, < _object_ >options]) - _ReadStream_ - Returns a new readable stream for `path`. `options` has the following defaults: + + ```javascript + { flags: 'r', + encoding: null, + handle: null, + mode: 0o666, + autoClose: true + } + ``` + + `options` can include `start` and `end` values to read a range of bytes from the file instead of the entire file. Both `start` and `end` are inclusive and start at 0. The `encoding` can be `'utf8'`, `'ascii'`, or `'base64'`. + + If `autoClose` is false, then the file handle won't be closed, even if there's an error. It is your responsibility to close it and make sure there's no file handle leak. If `autoClose` is set to true (default behavior), on `error` or `end` the file handle will be closed automatically. 
+ + An example to read the last 10 bytes of a file which is 100 bytes long: + + ```javascript + sftp.createReadStream('sample.txt', {start: 90, end: 99}); + ``` + +* **createWriteStream**(< _string_ >path[, < _object_ >options]) - _WriteStream_ - Returns a new writable stream for `path`. `options` has the following defaults: + + ```javascript + { + flags: 'w', + encoding: null, + mode: 0o666, + autoClose: true + } + ``` + + `options` may also include a `start` option to allow writing data at some position past the beginning of the file. Modifying a file rather than replacing it may require a flags mode of 'r+' rather than the default mode 'w'. + + If 'autoClose' is set to false and you pipe to this stream, this stream will not automatically close after there is no more data upstream -- allowing future pipes and/or manual writes. + +* **open**(< _string_ >filename, < _string_ >flags, [< _mixed_ >attrs_mode, ]< _function_ >callback) - _boolean_ - Opens a file `filename` with `flags` with optional _ATTRS_ object or file mode `attrs_mode`. `flags` is any of the flags supported by `fs.open` (except sync flag). Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _Buffer_ >handle. + +* **close**(< _Buffer_ >handle, < _function_ >callback) - _boolean_ - Closes the resource associated with `handle` given by open() or opendir(). Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **readData**(< _Buffer_ >handle, < _Buffer_ >buffer, < _integer_ >offset, < _integer_ >length, < _integer_ >position, < _function_ >callback) - _boolean_ - Reads `length` bytes from the resource associated with `handle` starting at `position` and stores the bytes in `buffer` starting at `offset`. Returns `false` if you should wait for the `continue` event before sending any more traffic. 
`callback` has 4 parameters: < _Error_ >err, < _integer_ >bytesRead, < _Buffer_ >buffer (offset adjusted), < _integer_ >position. + +* **writeData**(< _Buffer_ >handle, < _Buffer_ >buffer, < _integer_ >offset, < _integer_ >length, < _integer_ >position, < _function_ >callback) - _boolean_ - Writes `length` bytes from `buffer` starting at `offset` to the resource associated with `handle` starting at `position`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **fstat**(< _Buffer_ >handle, < _function_ >callback) - _boolean_ - Retrieves attributes for the resource associated with `handle`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _Stats_ >stats. + +* **fsetstat**(< _Buffer_ >handle, < _ATTRS_ >attributes, < _function_ >callback) - _boolean_ - Sets the attributes defined in `attributes` for the resource associated with `handle`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **futimes**(< _Buffer_ >handle, < _mixed_ >atime, < _mixed_ >mtime, < _function_ >callback) - _boolean_ - Sets the access time and modified time for the resource associated with `handle`. `atime` and `mtime` can be Date instances or UNIX timestamps. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **fchown**(< _Buffer_ >handle, < _integer_ >uid, < _integer_ >gid, < _function_ >callback) - _boolean_ - Sets the owner for the resource associated with `handle`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. 
+ +* **fchmod**(< _Buffer_ >handle, < _mixed_ >mode, < _function_ >callback) - _boolean_ - Sets the mode for the resource associated with `handle`. `mode` can be an integer or a string containing an octal number. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **opendir**(< _string_ >path, < _function_ >callback) - _boolean_ - Opens a directory `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _Buffer_ >handle. + +* **readdir**(< _mixed_ >location, < _function_ >callback) - _boolean_ - Retrieves a directory listing. `location` can either be a _Buffer_ containing a valid directory handle from opendir() or a _string_ containing the path to a directory. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _mixed_ >list. `list` is an _Array_ of `{ filename: 'foo', longname: '....', attrs: {...} }` style objects (attrs is of type _ATTR_). If `location` is a directory handle, this function may need to be called multiple times until `list` is boolean false, which indicates that no more directory entries are available for that directory handle. + +* **unlink**(< _string_ >path, < _function_ >callback) - _boolean_ - Removes the file/symlink at `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **rename**(< _string_ >srcPath, < _string_ >destPath, < _function_ >callback) - _boolean_ - Renames/moves `srcPath` to `destPath`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **mkdir**(< _string_ >path, [< _ATTRS_ >attributes, ]< _function_ >callback) - _boolean_ - Creates a new directory `path`. 
Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **rmdir**(< _string_ >path, < _function_ >callback) - _boolean_ - Removes the directory at `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **stat**(< _string_ >path, < _function_ >callback) - _boolean_ - Retrieves attributes for `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameter: < _Error_ >err, < _Stats_ >stats. + +* **lstat**(< _string_ >path, < _function_ >callback) - _boolean_ - Retrieves attributes for `path`. If `path` is a symlink, the link itself is stat'ed instead of the resource it refers to. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _Stats_ >stats. + +* **setstat**(< _string_ >path, < _ATTRS_ >attributes, < _function_ >callback) - _boolean_ - Sets the attributes defined in `attributes` for `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **utimes**(< _string_ >path, < _mixed_ >atime, < _mixed_ >mtime, < _function_ >callback) - _boolean_ - Sets the access time and modified time for `path`. `atime` and `mtime` can be Date instances or UNIX timestamps. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **chown**(< _string_ >path, < _integer_ >uid, < _integer_ >gid, < _function_ >callback) - _boolean_ - Sets the owner for `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. 
+ +* **chmod**(< _string_ >path, < _mixed_ >mode, < _function_ >callback) - _boolean_ - Sets the mode for `path`. `mode` can be an integer or a string containing an octal number. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **readlink**(< _string_ >path, < _function_ >callback) - _boolean_ - Retrieves the target for a symlink at `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _string_ >target. + +* **symlink**(< _string_ >targetPath, < _string_ >linkPath, < _function_ >callback) - _boolean_ - Creates a symlink at `linkPath` to `targetPath`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **realpath**(< _string_ >path, < _function_ >callback) - _boolean_ - Resolves `path` to an absolute path. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _string_ >absPath. + +* **ext_openssh_rename**(< _string_ >srcPath, < _string_ >destPath, < _function_ >callback) - _boolean_ - **OpenSSH extension** Performs POSIX rename(3) from `srcPath` to `destPath`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **ext_openssh_statvfs**(< _string_ >path, < _function_ >callback) - _boolean_ - **OpenSSH extension** Performs POSIX statvfs(2) on `path`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _object_ >fsInfo. `fsInfo` contains the information as found in the [statvfs struct](http://linux.die.net/man/2/statvfs). 
+ +* **ext_openssh_fstatvfs**(< _Buffer_ >handle, < _function_ >callback) - _boolean_ - **OpenSSH extension** Performs POSIX fstatvfs(2) on open handle `handle`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _object_ >fsInfo. `fsInfo` contains the information as found in the [statvfs struct](http://linux.die.net/man/2/statvfs). + +* **ext_openssh_hardlink**(< _string_ >targetPath, < _string_ >linkPath, < _function_ >callback) - _boolean_ - **OpenSSH extension** Performs POSIX link(2) to create a hard link to `targetPath` at `linkPath`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + +* **ext_openssh_fsync**(< _Buffer_ >handle, < _function_ >callback) - _boolean_ - **OpenSSH extension** Performs POSIX fsync(3) on the open handle `handle`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 1 parameter: < _Error_ >err. + + +**Server-only methods** + +* **status**(< _integer_ >reqID, < _integer_ >statusCode[, < _string_ >message]) - _boolean_ - Sends a status response for the request identified by `id`. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **handle**(< _integer_ >reqID, < _Buffer_ >handle) - _boolean_ - Sends a handle response for the request identified by `id`. `handle` must be less than 256 bytes and is an opaque value that could merely contain the value of a backing file descriptor or some other unique, custom value. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **data**(< _integer_ >reqID, < _mixed_ >data[, < _string_ >encoding]) - _boolean_ - Sends a data response for the request identified by `id`. `data` can be a _Buffer_ or _string_. If `data` is a string, `encoding` is the encoding of `data`. 
Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **name**(< _integer_ >reqID, < _array_ >names) - _boolean_ - Sends a name response for the request identified by `id`. Returns `false` if you should wait for the `continue` event before sending any more traffic. `names` must be an _array_ of _object_ where each _object_ can contain: + + * **filename** - _string_ - The entry's name. + + * **longname** - _string_ - This is the `ls -l`-style format for the entry (e.g. `-rwxr--r-- 1 bar bar 718 Dec 8 2009 foo`) + + * **attrs** - _ATTRS_ - This is an optional _ATTRS_ object that contains requested/available attributes for the entry. + +* **attrs**(< _integer_ >reqID, < _ATTRS_ >attrs) - _boolean_ - Sends an attrs response for the request identified by `id`. `attrs` contains the requested/available attributes. + + +ATTRS +----- + +An object with the following valid properties: + +* **mode** - _integer_ - Mode/permissions for the resource. + +* **uid** - _integer_ - User ID of the resource. + +* **gid** - _integer_ - Group ID of the resource. + +* **size** - _integer_ - Resource size in bytes. + +* **atime** - _integer_ - UNIX timestamp of the access time of the resource. + +* **mtime** - _integer_ - UNIX timestamp of the modified time of the resource. + +When supplying an ATTRS object to one of the SFTP methods: + +* `atime` and `mtime` can be either a Date instance or a UNIX timestamp. + +* `mode` can either be an integer or a string containing an octal number. 
+ + +Stats +----- + +An object with the same attributes as an ATTRS object with the addition of the following methods: + +* `stats.isDirectory()` + +* `stats.isFile()` + +* `stats.isBlockDevice()` + +* `stats.isCharacterDevice()` + +* `stats.isSymbolicLink()` + +* `stats.isFIFO()` + +* `stats.isSocket()` diff --git a/reverse_engineering/node_modules/ssh2-streams/index.js b/reverse_engineering/node_modules/ssh2-streams/index.js new file mode 100644 index 0000000..ac17b72 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/index.js @@ -0,0 +1,6 @@ +module.exports = { + SFTPStream: require('./lib/sftp'), + SSH2Stream: require('./lib/ssh'), + utils: require('./lib/utils'), + constants: require('./lib/constants') +}; \ No newline at end of file diff --git a/reverse_engineering/node_modules/ssh2-streams/lib/constants.js b/reverse_engineering/node_modules/ssh2-streams/lib/constants.js new file mode 100644 index 0000000..5feedb0 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/lib/constants.js @@ -0,0 +1,386 @@ +var semver = require('semver'); + +var i; +var keys; +var len; + +var MESSAGE = exports.MESSAGE = { + // Transport layer protocol -- generic (1-19) + DISCONNECT: 1, + IGNORE: 2, + UNIMPLEMENTED: 3, + DEBUG: 4, + SERVICE_REQUEST: 5, + SERVICE_ACCEPT: 6, + + // Transport layer protocol -- algorithm negotiation (20-29) + KEXINIT: 20, + NEWKEYS: 21, + + // Transport layer protocol -- key exchange method-specific (30-49) + + // User auth protocol -- generic (50-59) + USERAUTH_REQUEST: 50, + USERAUTH_FAILURE: 51, + USERAUTH_SUCCESS: 52, + USERAUTH_BANNER: 53, + + // User auth protocol -- user auth method-specific (60-79) + + // Connection protocol -- generic (80-89) + GLOBAL_REQUEST: 80, + REQUEST_SUCCESS: 81, + REQUEST_FAILURE: 82, + + // Connection protocol -- channel-related (90-127) + CHANNEL_OPEN: 90, + CHANNEL_OPEN_CONFIRMATION: 91, + CHANNEL_OPEN_FAILURE: 92, + CHANNEL_WINDOW_ADJUST: 93, + CHANNEL_DATA: 94, + CHANNEL_EXTENDED_DATA: 
95, + CHANNEL_EOF: 96, + CHANNEL_CLOSE: 97, + CHANNEL_REQUEST: 98, + CHANNEL_SUCCESS: 99, + CHANNEL_FAILURE: 100 + + // Reserved for client protocols (128-191) + + // Local extensions (192-155) +}; +for (i = 0, keys = Object.keys(MESSAGE), len = keys.length; i < len; ++i) + MESSAGE[MESSAGE[keys[i]]] = keys[i]; +// context-specific message codes: +MESSAGE.KEXDH_INIT = 30; +MESSAGE.KEXDH_REPLY = 31; +MESSAGE.KEXDH_GEX_REQUEST = 34; +MESSAGE.KEXDH_GEX_GROUP = 31; +MESSAGE.KEXDH_GEX_INIT = 32; +MESSAGE.KEXDH_GEX_REPLY = 33; +MESSAGE.KEXECDH_INIT = 30; // included here for completeness +MESSAGE.KEXECDH_REPLY = 31; // included here for completeness +MESSAGE.USERAUTH_PASSWD_CHANGEREQ = 60; +MESSAGE.USERAUTH_PK_OK = 60; +MESSAGE.USERAUTH_INFO_REQUEST = 60; +MESSAGE.USERAUTH_INFO_RESPONSE = 61; + +var DYNAMIC_KEXDH_MESSAGE = exports.DYNAMIC_KEXDH_MESSAGE = {}; +DYNAMIC_KEXDH_MESSAGE[MESSAGE.KEXDH_GEX_GROUP] = 'KEXDH_GEX_GROUP'; +DYNAMIC_KEXDH_MESSAGE[MESSAGE.KEXDH_GEX_REPLY] = 'KEXDH_GEX_REPLY'; + +var KEXDH_MESSAGE = exports.KEXDH_MESSAGE = {}; +KEXDH_MESSAGE[MESSAGE.KEXDH_INIT] = 'KEXDH_INIT'; +KEXDH_MESSAGE[MESSAGE.KEXDH_REPLY] = 'KEXDH_REPLY'; + +var DISCONNECT_REASON = exports.DISCONNECT_REASON = { + HOST_NOT_ALLOWED_TO_CONNECT: 1, + PROTOCOL_ERROR: 2, + KEY_EXCHANGE_FAILED: 3, + RESERVED: 4, + MAC_ERROR: 5, + COMPRESSION_ERROR: 6, + SERVICE_NOT_AVAILABLE: 7, + PROTOCOL_VERSION_NOT_SUPPORTED: 8, + HOST_KEY_NOT_VERIFIABLE: 9, + CONNECTION_LOST: 10, + BY_APPLICATION: 11, + TOO_MANY_CONNECTIONS: 12, + AUTH_CANCELED_BY_USER: 13, + NO_MORE_AUTH_METHODS_AVAILABLE: 14, + ILLEGAL_USER_NAME: 15 +}; +for (i = 0, keys = Object.keys(DISCONNECT_REASON), len = keys.length; + i < len; + ++i) { + DISCONNECT_REASON[DISCONNECT_REASON[keys[i]]] = keys[i]; +} + +var CHANNEL_OPEN_FAILURE = exports.CHANNEL_OPEN_FAILURE = { + ADMINISTRATIVELY_PROHIBITED: 1, + CONNECT_FAILED: 2, + UNKNOWN_CHANNEL_TYPE: 3, + RESOURCE_SHORTAGE: 4 +}; +for (i = 0, keys = Object.keys(CHANNEL_OPEN_FAILURE), len = 
keys.length; + i < len; + ++i) { + CHANNEL_OPEN_FAILURE[CHANNEL_OPEN_FAILURE[keys[i]]] = keys[i]; +} + +var TERMINAL_MODE = exports.TERMINAL_MODE = { + TTY_OP_END: 0, // Indicates end of options. + VINTR: 1, // Interrupt character; 255 if none. Similarly for the + // other characters. Not all of these characters are + // supported on all systems. + VQUIT: 2, // The quit character (sends SIGQUIT signal on POSIX + // systems). + VERASE: 3, // Erase the character to left of the cursor. + VKILL: 4, // Kill the current input line. + VEOF: 5, // End-of-file character (sends EOF from the terminal). + VEOL: 6, // End-of-line character in addition to carriage return + // and/or linefeed. + VEOL2: 7, // Additional end-of-line character. + VSTART: 8, // Continues paused output (normally control-Q). + VSTOP: 9, // Pauses output (normally control-S). + VSUSP: 10, // Suspends the current program. + VDSUSP: 11, // Another suspend character. + VREPRINT: 12, // Reprints the current input line. + VWERASE: 13, // Erases a word left of cursor. + VLNEXT: 14, // Enter the next character typed literally, even if it + // is a special character + VFLUSH: 15, // Character to flush output. + VSWTCH: 16, // Switch to a different shell layer. + VSTATUS: 17, // Prints system status line (load, command, pid, etc). + VDISCARD: 18, // Toggles the flushing of terminal output. + IGNPAR: 30, // The ignore parity flag. The parameter SHOULD be 0 + // if this flag is FALSE, and 1 if it is TRUE. + PARMRK: 31, // Mark parity and framing errors. + INPCK: 32, // Enable checking of parity errors. + ISTRIP: 33, // Strip 8th bit off characters. + INLCR: 34, // Map NL into CR on input. + IGNCR: 35, // Ignore CR on input. + ICRNL: 36, // Map CR to NL on input. + IUCLC: 37, // Translate uppercase characters to lowercase. + IXON: 38, // Enable output flow control. + IXANY: 39, // Any char will restart after stop. + IXOFF: 40, // Enable input flow control. + IMAXBEL: 41, // Ring bell on input queue full. 
+ ISIG: 50, // Enable signals INTR, QUIT, [D]SUSP. + ICANON: 51, // Canonicalize input lines. + XCASE: 52, // Enable input and output of uppercase characters by + // preceding their lowercase equivalents with "\". + ECHO: 53, // Enable echoing. + ECHOE: 54, // Visually erase chars. + ECHOK: 55, // Kill character discards current line. + ECHONL: 56, // Echo NL even if ECHO is off. + NOFLSH: 57, // Don't flush after interrupt. + TOSTOP: 58, // Stop background jobs from output. + IEXTEN: 59, // Enable extensions. + ECHOCTL: 60, // Echo control characters as ^(Char). + ECHOKE: 61, // Visual erase for line kill. + PENDIN: 62, // Retype pending input. + OPOST: 70, // Enable output processing. + OLCUC: 71, // Convert lowercase to uppercase. + ONLCR: 72, // Map NL to CR-NL. + OCRNL: 73, // Translate carriage return to newline (output). + ONOCR: 74, // Translate newline to carriage return-newline + // (output). + ONLRET: 75, // Newline performs a carriage return (output). + CS7: 90, // 7 bit mode. + CS8: 91, // 8 bit mode. + PARENB: 92, // Parity enable. + PARODD: 93, // Odd parity, else even. + TTY_OP_ISPEED: 128, // Specifies the input baud rate in bits per second. + TTY_OP_OSPEED: 129 // Specifies the output baud rate in bits per second. 
+}; +for (i = 0, keys = Object.keys(TERMINAL_MODE), len = keys.length; i < len; ++i) + TERMINAL_MODE[TERMINAL_MODE[keys[i]]] = keys[i]; + +var CHANNEL_EXTENDED_DATATYPE = exports.CHANNEL_EXTENDED_DATATYPE = { + STDERR: 1 +}; +for (i = 0, keys = Object.keys(CHANNEL_EXTENDED_DATATYPE), len = keys.length; + i < len; + ++i) { + CHANNEL_EXTENDED_DATATYPE[CHANNEL_EXTENDED_DATATYPE[keys[i]]] = keys[i]; +} + +exports.SIGNALS = ['ABRT', 'ALRM', 'FPE', 'HUP', 'ILL', 'INT', + 'QUIT', 'SEGV', 'TERM', 'USR1', 'USR2', 'KILL', + 'PIPE']; + +var DEFAULT_KEX = [ + 'diffie-hellman-group14-sha1' // REQUIRED +]; +var SUPPORTED_KEX = [ + 'diffie-hellman-group1-sha1' // REQUIRED +]; +if (semver.gte(process.version, '0.11.12')) { + // https://tools.ietf.org/html/rfc4419#section-4 + DEFAULT_KEX = [ + 'diffie-hellman-group-exchange-sha256' + ].concat(DEFAULT_KEX); + SUPPORTED_KEX = [ + 'diffie-hellman-group-exchange-sha1' + ].concat(SUPPORTED_KEX); +} +if (semver.gte(process.version, '0.11.14')) { + // https://tools.ietf.org/html/rfc5656#section-10.1 + DEFAULT_KEX = [ + 'ecdh-sha2-nistp256', + 'ecdh-sha2-nistp384', + 'ecdh-sha2-nistp521' + ].concat(DEFAULT_KEX); +} +var KEX_BUF = new Buffer(DEFAULT_KEX.join(','), 'ascii'); +SUPPORTED_KEX = DEFAULT_KEX.concat(SUPPORTED_KEX); + +var DEFAULT_SERVER_HOST_KEY = [ + 'ssh-rsa' +]; +var SUPPORTED_SERVER_HOST_KEY = [ + 'ssh-dss' +]; +if (semver.gte(process.version, '5.2.0')) { + // ECDSA keys are only supported in v5.2.0+ because of a crypto change that + // made it possible to (efficiently) generate an ECDSA public key from a + // private key (commit nodejs/node#da5ac55c83eb2c09cfb3baf7875529e8f1113529) + DEFAULT_SERVER_HOST_KEY.push( + 'ecdsa-sha2-nistp256', + 'ecdsa-sha2-nistp384', + 'ecdsa-sha2-nistp521' + ); +} +var SERVER_HOST_KEY_BUF = new Buffer(DEFAULT_SERVER_HOST_KEY.join(','), + 'ascii'); +SUPPORTED_SERVER_HOST_KEY = DEFAULT_SERVER_HOST_KEY.concat( + SUPPORTED_SERVER_HOST_KEY +); + +var DEFAULT_CIPHER = []; +var SUPPORTED_CIPHER = [ + 
'aes256-cbc', + 'aes192-cbc', + 'aes128-cbc', + 'blowfish-cbc', + '3des-cbc', + + // http://tools.ietf.org/html/rfc4345#section-4: + 'arcfour256', + 'arcfour128', + + 'cast128-cbc', + 'arcfour' +]; +if (semver.gte(process.version, '0.11.12')) { + // node v0.11.12 introduced support for setting AAD, which is needed for + // AES-GCM in SSH2 + DEFAULT_CIPHER = [ + // http://tools.ietf.org/html/rfc5647 + 'aes128-gcm', + 'aes128-gcm@openssh.com', + 'aes256-gcm', + 'aes256-gcm@openssh.com' + ].concat(DEFAULT_CIPHER); +} +DEFAULT_CIPHER = [ + // http://tools.ietf.org/html/rfc4344#section-4 + 'aes128-ctr', + 'aes192-ctr', + 'aes256-ctr' +].concat(DEFAULT_CIPHER); +var CIPHER_BUF = new Buffer(DEFAULT_CIPHER.join(','), 'ascii'); +SUPPORTED_CIPHER = DEFAULT_CIPHER.concat(SUPPORTED_CIPHER); + +var DEFAULT_HMAC = [ + 'hmac-sha2-256', + 'hmac-sha2-512', + 'hmac-sha1', +]; +var SUPPORTED_HMAC = [ + 'hmac-md5', + 'hmac-sha2-256-96', // first 96 bits of HMAC-SHA256 + 'hmac-sha2-512-96', // first 96 bits of HMAC-SHA512 + 'hmac-ripemd160', + 'hmac-sha1-96', // first 96 bits of HMAC-SHA1 + 'hmac-md5-96' // first 96 bits of HMAC-MD5 +]; +var HMAC_BUF = new Buffer(DEFAULT_HMAC.join(','), 'ascii'); +SUPPORTED_HMAC = DEFAULT_HMAC.concat(SUPPORTED_HMAC); + +var DEFAULT_COMPRESS = [ + 'none', + 'zlib@openssh.com', // ZLIB (LZ77) compression, except + // compression/decompression does not start until after + // successful user authentication + 'zlib' // ZLIB (LZ77) compression +]; +var SUPPORTED_COMPRESS = []; +var COMPRESS_BUF = new Buffer(DEFAULT_COMPRESS.join(','), 'ascii'); +SUPPORTED_COMPRESS = DEFAULT_COMPRESS.concat(SUPPORTED_COMPRESS); + +exports.ALGORITHMS = { + KEX: DEFAULT_KEX, + KEX_BUF: KEX_BUF, + SUPPORTED_KEX: SUPPORTED_KEX, + + SERVER_HOST_KEY: DEFAULT_SERVER_HOST_KEY, + SERVER_HOST_KEY_BUF: SERVER_HOST_KEY_BUF, + SUPPORTED_SERVER_HOST_KEY: SUPPORTED_SERVER_HOST_KEY, + + CIPHER: DEFAULT_CIPHER, + CIPHER_BUF: CIPHER_BUF, + SUPPORTED_CIPHER: SUPPORTED_CIPHER, + + HMAC: 
DEFAULT_HMAC, + HMAC_BUF: HMAC_BUF, + SUPPORTED_HMAC: SUPPORTED_HMAC, + + COMPRESS: DEFAULT_COMPRESS, + COMPRESS_BUF: COMPRESS_BUF, + SUPPORTED_COMPRESS: SUPPORTED_COMPRESS +}; +exports.SSH_TO_OPENSSL = { + // ECDH key exchange + 'ecdh-sha2-nistp256': 'prime256v1', // OpenSSL's name for 'secp256r1' + 'ecdh-sha2-nistp384': 'secp384r1', + 'ecdh-sha2-nistp521': 'secp521r1', + // Ciphers + 'aes128-gcm': 'aes-128-gcm', + 'aes256-gcm': 'aes-256-gcm', + 'aes128-gcm@openssh.com': 'aes-128-gcm', + 'aes256-gcm@openssh.com': 'aes-256-gcm', + '3des-cbc': 'des-ede3-cbc', + 'blowfish-cbc': 'bf-cbc', + 'aes256-cbc': 'aes-256-cbc', + 'aes192-cbc': 'aes-192-cbc', + 'aes128-cbc': 'aes-128-cbc', + 'idea-cbc': 'idea-cbc', + 'cast128-cbc': 'cast-cbc', + 'rijndael-cbc@lysator.liu.se': 'aes-256-cbc', + 'arcfour128': 'rc4', + 'arcfour256': 'rc4', + 'arcfour512': 'rc4', + 'arcfour': 'rc4', + 'camellia128-cbc': 'camellia-128-cbc', + 'camellia192-cbc': 'camellia-192-cbc', + 'camellia256-cbc': 'camellia-256-cbc', + 'camellia128-cbc@openssh.com': 'camellia-128-cbc', + 'camellia192-cbc@openssh.com': 'camellia-192-cbc', + 'camellia256-cbc@openssh.com': 'camellia-256-cbc', + '3des-ctr': 'des-ede3', + 'blowfish-ctr': 'bf-ecb', + 'aes256-ctr': 'aes-256-ctr', + 'aes192-ctr': 'aes-192-ctr', + 'aes128-ctr': 'aes-128-ctr', + 'cast128-ctr': 'cast5-ecb', + 'camellia128-ctr': 'camellia-128-ecb', + 'camellia192-ctr': 'camellia-192-ecb', + 'camellia256-ctr': 'camellia-256-ecb', + 'camellia128-ctr@openssh.com': 'camellia-128-ecb', + 'camellia192-ctr@openssh.com': 'camellia-192-ecb', + 'camellia256-ctr@openssh.com': 'camellia-256-ecb', + // HMAC + 'hmac-sha1-96': 'sha1', + 'hmac-sha1': 'sha1', + 'hmac-sha2-256': 'sha256', + 'hmac-sha2-256-96': 'sha256', + 'hmac-sha2-512': 'sha512', + 'hmac-sha2-512-96': 'sha512', + 'hmac-md5-96': 'md5', + 'hmac-md5': 'md5', + 'hmac-ripemd160': 'ripemd160' +}; + +var BUGS = exports.BUGS = { + BAD_DHGEX: 1, + OLD_EXIT: 2, + DYN_RPORT_BUG: 4 +}; + +exports.BUGGY_IMPLS = [ + [ 
'Cisco-1.25', BUGS.BAD_DHGEX ], + [ /^[0-9.]+$/, BUGS.OLD_EXIT ], // old SSH.com implementations + [ /^OpenSSH_5\.\d+/, BUGS.DYN_RPORT_BUG ] +]; diff --git a/reverse_engineering/node_modules/ssh2-streams/lib/jsbn.js b/reverse_engineering/node_modules/ssh2-streams/lib/jsbn.js new file mode 100644 index 0000000..309fe0d --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/lib/jsbn.js @@ -0,0 +1,1186 @@ +// Copyright (c) 2005 Tom Wu +// All Rights Reserved. +// See "LICENSE" for details. + +// Basic JavaScript BN library - subset useful for RSA encryption. + +// Bits per digit +var dbits; + +// JavaScript engine analysis +var canary = 0xdeadbeefcafe; +var j_lm = ((canary&0xffffff)==0xefcafe); + +// (public) Constructor +function BigInteger(a,b,c) { + if(a != null) + if("number" == typeof a) this.fromNumber(a,b,c); + else if(b == null && "string" != typeof a) this.fromString(a,256); + else this.fromString(a,b); +} + +// return new, unset BigInteger +function nbi() { return new BigInteger(null); } + +// am: Compute w_j += (x*this_i), propagate carries, +// c is initial carry, returns final carry. +// c < 3*dvalue, x < 2*dvalue, this_i < dvalue +// We need to select the fastest one that works in this environment. + +// Set max digit bits to 28 since some +// browsers slow down when dealing with 32-bit numbers. 
+function am3(i,x,w,j,c,n) { + var xl = x&0x3fff, xh = x>>14; + while(--n >= 0) { + var l = this[i]&0x3fff; + var h = this[i++]>>14; + var m = xh*l+h*xl; + l = xl*l+((m&0x3fff)<<14)+w[j]+c; + c = (l>>28)+(m>>14)+xh*h; + w[j++] = l&0xfffffff; + } + return c; +} +BigInteger.prototype.am = am3; +dbits = 28; + +BigInteger.prototype.DB = dbits; +BigInteger.prototype.DM = ((1<= 0; --i) r[i] = this[i]; + r.t = this.t; + r.s = this.s; +} + +// (protected) set from integer value x, -DV <= x < DV +function bnpFromInt(x) { + this.t = 1; + this.s = (x<0)?-1:0; + if(x > 0) this[0] = x; + else if(x < -1) this[0] = x+this.DV; + else this.t = 0; +} + +// return bigint initialized to value +function nbv(i) { var r = nbi(); r.fromInt(i); return r; } + +// (protected) set from string and radix +function bnpFromString(s,b) { + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 256) k = 8; // byte array + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else { this.fromRadix(s,b); return; } + this.t = 0; + this.s = 0; + var i = s.length, mi = false, sh = 0; + while(--i >= 0) { + var x = (k==8)?s[i]&0xff:intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-") mi = true; + continue; + } + mi = false; + if(sh == 0) + this[this.t++] = x; + else if(sh+k > this.DB) { + this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh)); + } + else + this[this.t-1] |= x<= this.DB) sh -= this.DB; + } + if(k == 8 && (s[0]&0x80) != 0) { + this.s = -1; + if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this[this.t-1] == c) --this.t; +} + +// (public) return string representation in given radix +function bnToString(b) { + if(this.s < 0) return "-"+this.negate().toString(b); + var k; + if(b == 16) k = 4; + else if(b == 8) k = 3; + else if(b == 2) k = 1; + else if(b == 32) k = 5; + else if(b == 4) k = 2; + else return this.toRadix(b); + var km = (1< 0) { + if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); } + while(i >= 0) { + if(p < k) { + d 
= (this[i]&((1<>(p+=this.DB-k); + } + else { + d = (this[i]>>(p-=k))&km; + if(p <= 0) { p += this.DB; --i; } + } + if(d > 0) m = true; + if(m) r += int2char(d); + } + } + return m?r:"0"; +} + +// (public) -this +function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; } + +// (public) |this| +function bnAbs() { return (this.s<0)?this.negate():this; } + +// (public) return + if this > a, - if this < a, 0 if equal +function bnCompareTo(a) { + var r = this.s-a.s; + if(r != 0) return r; + var i = this.t; + r = i-a.t; + if(r != 0) return (this.s<0)?-r:r; + while(--i >= 0) if((r=this[i]-a[i]) != 0) return r; + return 0; +} + +// returns bit length of the integer x +function nbits(x) { + var r = 1, t; + if((t=x>>>16) != 0) { x = t; r += 16; } + if((t=x>>8) != 0) { x = t; r += 8; } + if((t=x>>4) != 0) { x = t; r += 4; } + if((t=x>>2) != 0) { x = t; r += 2; } + if((t=x>>1) != 0) { x = t; r += 1; } + return r; +} + +// (public) return the number of bits in "this" +function bnBitLength() { + if(this.t <= 0) return 0; + return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM)); +} + +// (protected) r = this << n*DB +function bnpDLShiftTo(n,r) { + var i; + for(i = this.t-1; i >= 0; --i) r[i+n] = this[i]; + for(i = n-1; i >= 0; --i) r[i] = 0; + r.t = this.t+n; + r.s = this.s; +} + +// (protected) r = this >> n*DB +function bnpDRShiftTo(n,r) { + for(var i = n; i < this.t; ++i) r[i-n] = this[i]; + r.t = Math.max(this.t-n,0); + r.s = this.s; +} + +// (protected) r = this << n +function bnpLShiftTo(n,r) { + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<= 0; --i) { + r[i+ds+1] = (this[i]>>cbs)|c; + c = (this[i]&bm)<= 0; --i) r[i] = 0; + r[ds] = c; + r.t = this.t+ds+1; + r.s = this.s; + r.clamp(); +} + +// (protected) r = this >> n +function bnpRShiftTo(n,r) { + r.s = this.s; + var ds = Math.floor(n/this.DB); + if(ds >= this.t) { r.t = 0; return; } + var bs = n%this.DB; + var cbs = this.DB-bs; + var bm = (1<>bs; + for(var i = ds+1; i < this.t; 
++i) { + r[i-ds-1] |= (this[i]&bm)<>bs; + } + if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<>= this.DB; + } + if(a.t < this.t) { + c -= a.s; + while(i < this.t) { + c += this[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; + } + else { + c += this.s; + while(i < a.t) { + c -= a[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c -= a.s; + } + r.s = (c<0)?-1:0; + if(c < -1) r[i++] = this.DV+c; + else if(c > 0) r[i++] = c; + r.t = i; + r.clamp(); +} + +// (protected) r = this * a, r != this,a (HAC 14.12) +// "this" should be the larger one if appropriate. +function bnpMultiplyTo(a,r) { + var x = this.abs(), y = a.abs(); + var i = x.t; + r.t = i+y.t; + while(--i >= 0) r[i] = 0; + for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t); + r.s = 0; + r.clamp(); + if(this.s != a.s) BigInteger.ZERO.subTo(r,r); +} + +// (protected) r = this^2, r != this (HAC 14.16) +function bnpSquareTo(r) { + var x = this.abs(); + var i = r.t = 2*x.t; + while(--i >= 0) r[i] = 0; + for(i = 0; i < x.t-1; ++i) { + var c = x.am(i,x[i],r,2*i,0,1); + if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) { + r[i+x.t] -= x.DV; + r[i+x.t+1] = 1; + } + } + if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1); + r.s = 0; + r.clamp(); +} + +// (protected) divide this by m, quotient and remainder to q, r (HAC 14.20) +// r != q, this != m. q or r may be null. 
+function bnpDivRemTo(m,q,r) { + var pm = m.abs(); + if(pm.t <= 0) return; + var pt = this.abs(); + if(pt.t < pm.t) { + if(q != null) q.fromInt(0); + if(r != null) this.copyTo(r); + return; + } + if(r == null) r = nbi(); + var y = nbi(), ts = this.s, ms = m.s; + var nsh = this.DB-nbits(pm[pm.t-1]); // normalize modulus + if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); } + else { pm.copyTo(y); pt.copyTo(r); } + var ys = y.t; + var y0 = y[ys-1]; + if(y0 == 0) return; + var yt = y0*(1<1)?y[ys-2]>>this.F2:0); + var d1 = this.FV/yt, d2 = (1<= 0) { + r[r.t++] = 1; + r.subTo(t,r); + } + BigInteger.ONE.dlShiftTo(ys,t); + t.subTo(y,y); // "negative" y so we can replace sub with am later + while(y.t < ys) y[y.t++] = 0; + while(--j >= 0) { + // Estimate quotient digit + var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2); + if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out + y.dlShiftTo(j,t); + r.subTo(t,r); + while(r[i] < --qd) r.subTo(t,r); + } + } + if(q != null) { + r.drShiftTo(ys,q); + if(ts != ms) BigInteger.ZERO.subTo(q,q); + } + r.t = ys; + r.clamp(); + if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder + if(ts < 0) BigInteger.ZERO.subTo(r,r); +} + +// (public) this mod a +function bnMod(a) { + var r = nbi(); + this.abs().divRemTo(a,null,r); + if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r); + return r; +} + +// Modular reduction using "classic" algorithm +function Classic(m) { this.m = m; } +function cConvert(x) { + if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m); + else return x; +} +function cRevert(x) { return x; } +function cReduce(x) { x.divRemTo(this.m,null,x); } +function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } +function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + +Classic.prototype.convert = cConvert; +Classic.prototype.revert = cRevert; +Classic.prototype.reduce = cReduce; +Classic.prototype.mulTo = cMulTo; +Classic.prototype.sqrTo = cSqrTo; + +// (protected) return "-1/this % 2^DB"; 
useful for Mont. reduction +// justification: +// xy == 1 (mod m) +// xy = 1+km +// xy(2-xy) = (1+km)(1-km) +// x[y(2-xy)] = 1-k^2m^2 +// x[y(2-xy)] == 1 (mod m^2) +// if y is 1/x mod m, then y(2-xy) is 1/x mod m^2 +// should reduce x and y(2-xy) by m^2 at each step to keep size bounded. +// JS multiply "overflows" differently from C/C++, so care is needed here. +function bnpInvDigit() { + if(this.t < 1) return 0; + var x = this[0]; + if((x&1) == 0) return 0; + var y = x&3; // y == 1/x mod 2^2 + y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4 + y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8 + y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16 + // last step - calculate inverse mod DV directly; + // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints + y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits + // we really want the negative inverse, and -DV < y < DV + return (y>0)?this.DV-y:-y; +} + +// Montgomery reduction +function Montgomery(m) { + this.m = m; + this.mp = m.invDigit(); + this.mpl = this.mp&0x7fff; + this.mph = this.mp>>15; + this.um = (1<<(m.DB-15))-1; + this.mt2 = 2*m.t; +} + +// xR mod m +function montConvert(x) { + var r = nbi(); + x.abs().dlShiftTo(this.m.t,r); + r.divRemTo(this.m,null,r); + if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r); + return r; +} + +// x/R mod m +function montRevert(x) { + var r = nbi(); + x.copyTo(r); + this.reduce(r); + return r; +} + +// x = x/R mod m (HAC 14.32) +function montReduce(x) { + while(x.t <= this.mt2) // pad x so am has enough room later + x[x.t++] = 0; + for(var i = 0; i < this.m.t; ++i) { + // faster way of calculating u0 = x[i]*mp mod DV + var j = x[i]&0x7fff; + var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM; + // use am to combine the multiply-shift-add into one call + j = i+this.m.t; + x[j] += this.m.am(0,u0,x,i,0,this.m.t); + // propagate carry + while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; } + } + x.clamp(); + 
x.drShiftTo(this.m.t,x); + if(x.compareTo(this.m) >= 0) x.subTo(this.m,x); +} + +// r = "x^2/R mod m"; x != r +function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + +// r = "xy/R mod m"; x,y != r +function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + +Montgomery.prototype.convert = montConvert; +Montgomery.prototype.revert = montRevert; +Montgomery.prototype.reduce = montReduce; +Montgomery.prototype.mulTo = montMulTo; +Montgomery.prototype.sqrTo = montSqrTo; + +// (protected) true iff this is even +function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; } + +// (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79) +function bnpExp(e,z) { + if(e > 0xffffffff || e < 1) return BigInteger.ONE; + var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1; + g.copyTo(r); + while(--i >= 0) { + z.sqrTo(r,r2); + if((e&(1< 0) z.mulTo(r2,g,r); + else { var t = r; r = r2; r2 = t; } + } + return z.revert(r); +} + +// (public) this^e % m, 0 <= e < 2^32 +function bnModPowInt(e,m) { + var z; + if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m); + return this.exp(e,z); +} + +// protected +BigInteger.prototype.copyTo = bnpCopyTo; +BigInteger.prototype.fromInt = bnpFromInt; +BigInteger.prototype.fromString = bnpFromString; +BigInteger.prototype.clamp = bnpClamp; +BigInteger.prototype.dlShiftTo = bnpDLShiftTo; +BigInteger.prototype.drShiftTo = bnpDRShiftTo; +BigInteger.prototype.lShiftTo = bnpLShiftTo; +BigInteger.prototype.rShiftTo = bnpRShiftTo; +BigInteger.prototype.subTo = bnpSubTo; +BigInteger.prototype.multiplyTo = bnpMultiplyTo; +BigInteger.prototype.squareTo = bnpSquareTo; +BigInteger.prototype.divRemTo = bnpDivRemTo; +BigInteger.prototype.invDigit = bnpInvDigit; +BigInteger.prototype.isEven = bnpIsEven; +BigInteger.prototype.exp = bnpExp; + +// public +BigInteger.prototype.toString = bnToString; +BigInteger.prototype.negate = bnNegate; +BigInteger.prototype.abs = bnAbs; +BigInteger.prototype.compareTo = 
bnCompareTo; +BigInteger.prototype.bitLength = bnBitLength; +BigInteger.prototype.mod = bnMod; +BigInteger.prototype.modPowInt = bnModPowInt; + +// "constants" +BigInteger.ZERO = nbv(0); +BigInteger.ONE = nbv(1); + +// Copyright (c) 2005-2009 Tom Wu +// All Rights Reserved. +// See "LICENSE" for details. + +// Extended JavaScript BN functions, required for RSA private ops. + +// Version 1.1: new BigInteger("0", 10) returns "proper" zero +// Version 1.2: square() API, isProbablePrime fix + +// (public) +function bnClone() { var r = nbi(); this.copyTo(r); return r; } + +// (public) return value as integer +function bnIntValue() { + if(this.s < 0) { + if(this.t == 1) return this[0]-this.DV; + else if(this.t == 0) return -1; + } + else if(this.t == 1) return this[0]; + else if(this.t == 0) return 0; + // assumes 16 < DB < 32 + return ((this[1]&((1<<(32-this.DB))-1))<>24; } + +// (public) return value as short (assumes DB>=16) +function bnShortValue() { return (this.t==0)?this.s:(this[0]<<16)>>16; } + +// (protected) return x s.t. 
r^x < DV +function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); } + +// (public) 0 if this == 0, 1 if this > 0 +function bnSigNum() { + if(this.s < 0) return -1; + else if(this.t <= 0 || (this.t == 1 && this[0] <= 0)) return 0; + else return 1; +} + +// (protected) convert to radix string +function bnpToRadix(b) { + if(b == null) b = 10; + if(this.signum() == 0 || b < 2 || b > 36) return "0"; + var cs = this.chunkSize(b); + var a = Math.pow(b,cs); + var d = nbv(a), y = nbi(), z = nbi(), r = ""; + this.divRemTo(d,y,z); + while(y.signum() > 0) { + r = (a+z.intValue()).toString(b).substr(1) + r; + y.divRemTo(d,y,z); + } + return z.intValue().toString(b) + r; +} + +// (protected) convert from radix string +function bnpFromRadix(s,b) { + this.fromInt(0); + if(b == null) b = 10; + var cs = this.chunkSize(b); + var d = Math.pow(b,cs), mi = false, j = 0, w = 0; + for(var i = 0; i < s.length; ++i) { + var x = intAt(s,i); + if(x < 0) { + if(s.charAt(i) == "-" && this.signum() == 0) mi = true; + continue; + } + w = b*w+x; + if(++j >= cs) { + this.dMultiply(d); + this.dAddOffset(w,0); + j = 0; + w = 0; + } + } + if(j > 0) { + this.dMultiply(Math.pow(b,j)); + this.dAddOffset(w,0); + } + if(mi) BigInteger.ZERO.subTo(this,this); +} + +// (protected) alternate constructor +function bnpFromNumber(a,b,c) { + if("number" == typeof b) { + // new BigInteger(int,int,RNG) + if(a < 2) this.fromInt(1); + else { + this.fromNumber(a,c); + if(!this.testBit(a-1)) // force MSB set + this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this); + if(this.isEven()) this.dAddOffset(1,0); // force odd + while(!this.isProbablePrime(b)) { + this.dAddOffset(2,0); + if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this); + } + } + } + else { + // new BigInteger(int,RNG) + var x = new Array(), t = a&7; + x.length = (a>>3)+1; + b.nextBytes(x); + if(t > 0) x[0] &= ((1< 0) { + if(p < this.DB && (d = this[i]>>p) != (this.s&this.DM)>>p) + r[k++] = d|(this.s<<(this.DB-p)); + 
while(i >= 0) { + if(p < 8) { + d = (this[i]&((1<>(p+=this.DB-8); + } + else { + d = (this[i]>>(p-=8))&0xff; + if(p <= 0) { p += this.DB; --i; } + } + if((d&0x80) != 0) d |= -256; + if(k == 0 && (this.s&0x80) != (d&0x80)) ++k; + if(k > 0 || d != this.s) r[k++] = d; + } + } + return r; +} + +function bnEquals(a) { return(this.compareTo(a)==0); } +function bnMin(a) { return(this.compareTo(a)<0)?this:a; } +function bnMax(a) { return(this.compareTo(a)>0)?this:a; } + +// (protected) r = this op a (bitwise) +function bnpBitwiseTo(a,op,r) { + var i, f, m = Math.min(a.t,this.t); + for(i = 0; i < m; ++i) r[i] = op(this[i],a[i]); + if(a.t < this.t) { + f = a.s&this.DM; + for(i = m; i < this.t; ++i) r[i] = op(this[i],f); + r.t = this.t; + } + else { + f = this.s&this.DM; + for(i = m; i < a.t; ++i) r[i] = op(f,a[i]); + r.t = a.t; + } + r.s = op(this.s,a.s); + r.clamp(); +} + +// (public) this & a +function op_and(x,y) { return x&y; } +function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; } + +// (public) this | a +function op_or(x,y) { return x|y; } +function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; } + +// (public) this ^ a +function op_xor(x,y) { return x^y; } +function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; } + +// (public) this & ~a +function op_andnot(x,y) { return x&~y; } +function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; } + +// (public) ~this +function bnNot() { + var r = nbi(); + for(var i = 0; i < this.t; ++i) r[i] = this.DM&~this[i]; + r.t = this.t; + r.s = ~this.s; + return r; +} + +// (public) this << n +function bnShiftLeft(n) { + var r = nbi(); + if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r); + return r; +} + +// (public) this >> n +function bnShiftRight(n) { + var r = nbi(); + if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r); + return r; +} + +// return index of lowest 1-bit in x, x < 2^31 +function lbit(x) { + if(x == 0) return -1; + var r = 0; + 
if((x&0xffff) == 0) { x >>= 16; r += 16; } + if((x&0xff) == 0) { x >>= 8; r += 8; } + if((x&0xf) == 0) { x >>= 4; r += 4; } + if((x&3) == 0) { x >>= 2; r += 2; } + if((x&1) == 0) ++r; + return r; +} + +// (public) returns index of lowest 1-bit (or -1 if none) +function bnGetLowestSetBit() { + for(var i = 0; i < this.t; ++i) + if(this[i] != 0) return i*this.DB+lbit(this[i]); + if(this.s < 0) return this.t*this.DB; + return -1; +} + +// return number of 1 bits in x +function cbit(x) { + var r = 0; + while(x != 0) { x &= x-1; ++r; } + return r; +} + +// (public) return number of set bits +function bnBitCount() { + var r = 0, x = this.s&this.DM; + for(var i = 0; i < this.t; ++i) r += cbit(this[i]^x); + return r; +} + +// (public) true iff nth bit is set +function bnTestBit(n) { + var j = Math.floor(n/this.DB); + if(j >= this.t) return(this.s!=0); + return((this[j]&(1<<(n%this.DB)))!=0); +} + +// (protected) this op (1<>= this.DB; + } + if(a.t < this.t) { + c += a.s; + while(i < this.t) { + c += this[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += this.s; + } + else { + c += this.s; + while(i < a.t) { + c += a[i]; + r[i++] = c&this.DM; + c >>= this.DB; + } + c += a.s; + } + r.s = (c<0)?-1:0; + if(c > 0) r[i++] = c; + else if(c < -1) r[i++] = this.DV+c; + r.t = i; + r.clamp(); +} + +// (public) this + a +function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; } + +// (public) this - a +function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; } + +// (public) this * a +function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; } + +// (public) this^2 +function bnSquare() { var r = nbi(); this.squareTo(r); return r; } + +// (public) this / a +function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; } + +// (public) this % a +function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; } + +// (public) [this/a,this%a] +function bnDivideAndRemainder(a) { + var q = nbi(), r = nbi(); + this.divRemTo(a,q,r); + 
return new Array(q,r); +} + +// (protected) this *= n, this >= 0, 1 < n < DV +function bnpDMultiply(n) { + this[this.t] = this.am(0,n-1,this,0,0,this.t); + ++this.t; + this.clamp(); +} + +// (protected) this += n << w words, this >= 0 +function bnpDAddOffset(n,w) { + if(n == 0) return; + while(this.t <= w) this[this.t++] = 0; + this[w] += n; + while(this[w] >= this.DV) { + this[w] -= this.DV; + if(++w >= this.t) this[this.t++] = 0; + ++this[w]; + } +} + +// A "null" reducer +function NullExp() {} +function nNop(x) { return x; } +function nMulTo(x,y,r) { x.multiplyTo(y,r); } +function nSqrTo(x,r) { x.squareTo(r); } + +NullExp.prototype.convert = nNop; +NullExp.prototype.revert = nNop; +NullExp.prototype.mulTo = nMulTo; +NullExp.prototype.sqrTo = nSqrTo; + +// (public) this^e +function bnPow(e) { return this.exp(e,new NullExp()); } + +// (protected) r = lower n words of "this * a", a.t <= n +// "this" should be the larger one if appropriate. +function bnpMultiplyLowerTo(a,n,r) { + var i = Math.min(this.t+a.t,n); + r.s = 0; // assumes a,this >= 0 + r.t = i; + while(i > 0) r[--i] = 0; + var j; + for(j = r.t-this.t; i < j; ++i) r[i+this.t] = this.am(0,a[i],r,i,0,this.t); + for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a[i],r,i,0,n-i); + r.clamp(); +} + +// (protected) r = "this * a" without lower n words, n > 0 +// "this" should be the larger one if appropriate. 
+function bnpMultiplyUpperTo(a,n,r) { + --n; + var i = r.t = this.t+a.t-n; + r.s = 0; // assumes a,this >= 0 + while(--i >= 0) r[i] = 0; + for(i = Math.max(n-this.t,0); i < a.t; ++i) + r[this.t+i-n] = this.am(n-i,a[i],r,0,0,this.t+i-n); + r.clamp(); + r.drShiftTo(1,r); +} + +// Barrett modular reduction +function Barrett(m) { + // setup Barrett + this.r2 = nbi(); + this.q3 = nbi(); + BigInteger.ONE.dlShiftTo(2*m.t,this.r2); + this.mu = this.r2.divide(m); + this.m = m; +} + +function barrettConvert(x) { + if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m); + else if(x.compareTo(this.m) < 0) return x; + else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; } +} + +function barrettRevert(x) { return x; } + +// x = x mod m (HAC 14.42) +function barrettReduce(x) { + x.drShiftTo(this.m.t-1,this.r2); + if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); } + this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3); + this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2); + while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1); + x.subTo(this.r2,x); + while(x.compareTo(this.m) >= 0) x.subTo(this.m,x); +} + +// r = x^2 mod m; x != r +function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); } + +// r = x*y mod m; x,y != r +function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); } + +Barrett.prototype.convert = barrettConvert; +Barrett.prototype.revert = barrettRevert; +Barrett.prototype.reduce = barrettReduce; +Barrett.prototype.mulTo = barrettMulTo; +Barrett.prototype.sqrTo = barrettSqrTo; + +// (public) this^e % m (HAC 14.85) +function bnModPow(e,m) { + var i = e.bitLength(), k, r = nbv(1), z; + if(i <= 0) return r; + else if(i < 18) k = 1; + else if(i < 48) k = 3; + else if(i < 144) k = 4; + else if(i < 768) k = 5; + else k = 6; + if(i < 8) + z = new Classic(m); + else if(m.isEven()) + z = new Barrett(m); + else + z = new Montgomery(m); + + // precomputation + var g = new Array(), n = 3, k1 = k-1, km = (1< 1) { + var g2 = nbi(); + z.sqrTo(g[1],g2); + 
while(n <= km) { + g[n] = nbi(); + z.mulTo(g2,g[n-2],g[n]); + n += 2; + } + } + + var j = e.t-1, w, is1 = true, r2 = nbi(), t; + i = nbits(e[j])-1; + while(j >= 0) { + if(i >= k1) w = (e[j]>>(i-k1))&km; + else { + w = (e[j]&((1<<(i+1))-1))<<(k1-i); + if(j > 0) w |= e[j-1]>>(this.DB+i-k1); + } + + n = k; + while((w&1) == 0) { w >>= 1; --n; } + if((i -= n) < 0) { i += this.DB; --j; } + if(is1) { // ret == 1, don't bother squaring or multiplying it + g[w].copyTo(r); + is1 = false; + } + else { + while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; } + if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; } + z.mulTo(r2,g[w],r); + } + + while(j >= 0 && (e[j]&(1< 0) { + x.rShiftTo(g,x); + y.rShiftTo(g,y); + } + while(x.signum() > 0) { + if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x); + if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y); + if(x.compareTo(y) >= 0) { + x.subTo(y,x); + x.rShiftTo(1,x); + } + else { + y.subTo(x,y); + y.rShiftTo(1,y); + } + } + if(g > 0) y.lShiftTo(g,y); + return y; +} + +// (protected) this % n, n < 2^26 +function bnpModInt(n) { + if(n <= 0) return 0; + var d = this.DV%n, r = (this.s<0)?n-1:0; + if(this.t > 0) + if(d == 0) r = this[0]%n; + else for(var i = this.t-1; i >= 0; --i) r = (d*r+this[i])%n; + return r; +} + +// (public) 1/this % m (HAC 14.61) +function bnModInverse(m) { + var ac = m.isEven(); + if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO; + var u = m.clone(), v = this.clone(); + var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1); + while(u.signum() != 0) { + while(u.isEven()) { + u.rShiftTo(1,u); + if(ac) { + if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); } + a.rShiftTo(1,a); + } + else if(!b.isEven()) b.subTo(m,b); + b.rShiftTo(1,b); + } + while(v.isEven()) { + v.rShiftTo(1,v); + if(ac) { + if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); } + c.rShiftTo(1,c); + } + else if(!d.isEven()) d.subTo(m,d); + d.rShiftTo(1,d); + } + if(u.compareTo(v) >= 0) { + u.subTo(v,u); + if(ac) 
a.subTo(c,a); + b.subTo(d,b); + } + else { + v.subTo(u,v); + if(ac) c.subTo(a,c); + d.subTo(b,d); + } + } + if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO; + if(d.compareTo(m) >= 0) return d.subtract(m); + if(d.signum() < 0) d.addTo(m,d); else return d; + if(d.signum() < 0) return d.add(m); else return d; +} + +var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997]; +var lplim = (1<<26)/lowprimes[lowprimes.length-1]; + +// (public) test primality with certainty >= 1-.5^t +function bnIsProbablePrime(t) { + var i, x = this.abs(); + if(x.t == 1 && x[0] <= lowprimes[lowprimes.length-1]) { + for(i = 0; i < lowprimes.length; ++i) + if(x[0] == lowprimes[i]) return true; + return false; + } + if(x.isEven()) return false; + i = 1; + while(i < lowprimes.length) { + var m = lowprimes[i], j = i+1; + while(j < lowprimes.length && m < lplim) m *= lowprimes[j++]; + m = x.modInt(m); + while(i < j) if(m%lowprimes[i++] == 0) return false; + } + return x.millerRabin(t); +} + +// (protected) true if probably prime (HAC 4.24, Miller-Rabin) +function bnpMillerRabin(t) { + var n1 = this.subtract(BigInteger.ONE); + var k = n1.getLowestSetBit(); + if(k <= 0) return false; + var r = n1.shiftRight(k); + t = (t+1)>>1; + if(t > lowprimes.length) t = lowprimes.length; + var a = nbi(); + for(var i = 0; i < t; ++i) { + //Pick bases at random, instead of starting at 2 + 
a.fromInt(lowprimes[Math.floor(Math.random()*lowprimes.length)]); + var y = a.modPow(r,this); + if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) { + var j = 1; + while(j++ < k && y.compareTo(n1) != 0) { + y = y.modPowInt(2,this); + if(y.compareTo(BigInteger.ONE) == 0) return false; + } + if(y.compareTo(n1) != 0) return false; + } + } + return true; +} + +// protected +BigInteger.prototype.chunkSize = bnpChunkSize; +BigInteger.prototype.toRadix = bnpToRadix; +BigInteger.prototype.fromRadix = bnpFromRadix; +BigInteger.prototype.fromNumber = bnpFromNumber; +BigInteger.prototype.bitwiseTo = bnpBitwiseTo; +BigInteger.prototype.changeBit = bnpChangeBit; +BigInteger.prototype.addTo = bnpAddTo; +BigInteger.prototype.dMultiply = bnpDMultiply; +BigInteger.prototype.dAddOffset = bnpDAddOffset; +BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo; +BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo; +BigInteger.prototype.modInt = bnpModInt; +BigInteger.prototype.millerRabin = bnpMillerRabin; + +// public +BigInteger.prototype.clone = bnClone; +BigInteger.prototype.intValue = bnIntValue; +BigInteger.prototype.byteValue = bnByteValue; +BigInteger.prototype.shortValue = bnShortValue; +BigInteger.prototype.signum = bnSigNum; +BigInteger.prototype.toByteArray = bnToByteArray; +BigInteger.prototype.equals = bnEquals; +BigInteger.prototype.min = bnMin; +BigInteger.prototype.max = bnMax; +BigInteger.prototype.and = bnAnd; +BigInteger.prototype.or = bnOr; +BigInteger.prototype.xor = bnXor; +BigInteger.prototype.andNot = bnAndNot; +BigInteger.prototype.not = bnNot; +BigInteger.prototype.shiftLeft = bnShiftLeft; +BigInteger.prototype.shiftRight = bnShiftRight; +BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit; +BigInteger.prototype.bitCount = bnBitCount; +BigInteger.prototype.testBit = bnTestBit; +BigInteger.prototype.setBit = bnSetBit; +BigInteger.prototype.clearBit = bnClearBit; +BigInteger.prototype.flipBit = bnFlipBit; +BigInteger.prototype.add = 
bnAdd; +BigInteger.prototype.subtract = bnSubtract; +BigInteger.prototype.multiply = bnMultiply; +BigInteger.prototype.divide = bnDivide; +BigInteger.prototype.remainder = bnRemainder; +BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder; +BigInteger.prototype.modPow = bnModPow; +BigInteger.prototype.modInverse = bnModInverse; +BigInteger.prototype.pow = bnPow; +BigInteger.prototype.gcd = bnGCD; +BigInteger.prototype.isProbablePrime = bnIsProbablePrime; + +// JSBN-specific extension +BigInteger.prototype.square = bnSquare; + +// Expose the Barrett function +BigInteger.prototype.Barrett = Barrett + +// BigInteger interfaces not implemented in jsbn: + +// BigInteger(int signum, byte[] magnitude) +// double doubleValue() +// float floatValue() +// int hashCode() +// long longValue() +// static BigInteger valueOf(long val) + +module.exports = BigInteger; + diff --git a/reverse_engineering/node_modules/ssh2-streams/lib/keyParser.js b/reverse_engineering/node_modules/ssh2-streams/lib/keyParser.js new file mode 100644 index 0000000..937f08d --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/lib/keyParser.js @@ -0,0 +1,239 @@ +// TODO: +// * handle multi-line header values (OpenSSH)? +// * more thorough validation? + +var utils; +var Ber = require('asn1').Ber; +var semver = require('semver'); + +var RE_PPK = /^PuTTY-User-Key-File-2: ssh-(rsa|dss)\r?\nEncryption: (aes256-cbc|none)\r?\nComment: ([^\r\n]*)\r?\nPublic-Lines: \d+\r?\n([\s\S]+?)\r?\nPrivate-Lines: \d+\r?\n([\s\S]+?)\r?\nPrivate-MAC: ([^\r\n]+)/; +var RE_HEADER_OPENSSH_PRIV = /^-----BEGIN (RSA|DSA|EC) PRIVATE KEY-----$/i; +var RE_FOOTER_OPENSSH_PRIV = /^-----END (?:RSA|DSA|EC) PRIVATE KEY-----$/i; +var RE_HEADER_OPENSSH_PUB = /^((?:(?:ssh-(rsa|dss))|ecdsa-sha2-nistp(256|384|521))(?:-cert-v0[01]@openssh.com)?) 
([A-Z0-9a-z\/+=]+)(?:$|\s+([\S].*)?)$/i; +var RE_HEADER_RFC4716_PUB = /^---- BEGIN SSH2 PUBLIC KEY ----$/i; +var RE_FOOTER_RFC4716_PUB = /^---- END SSH2 PUBLIC KEY ----$/i; +var RE_HEADER_OPENSSH = /^([^:]+):\s*([\S].*)?$/i; +var RE_HEADER_RFC4716 = /^([^:]+): (.*)?$/i; + +module.exports = function(data) { + if (Buffer.isBuffer(data)) + data = data.toString('utf8'); + else if (typeof data !== 'string') + return new Error('Key data must be a Buffer or string'); + + var ret = { + fulltype: undefined, + type: undefined, + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: undefined, + publicOrig: undefined + }; + var m; + var i; + var len; + + data = data.trim().split(/\r\n|\n/); + + while (!data[0].length) + data.shift(); + while (!data.slice(-1)[0].length) + data.pop(); + + var orig = data.join('\n'); + + if ((m = RE_HEADER_OPENSSH_PRIV.exec(data[0])) + && RE_FOOTER_OPENSSH_PRIV.test(data.slice(-1))) { + // OpenSSH private key + var keyType = m[1].toLowerCase(); + if (keyType === 'dsa') + keyType = 'dss'; + + if (keyType === 'ec' && semver.lt(process.version, '5.2.0')) { + return new Error( + 'EC private keys are not supported in this version of node' + ); + } + + if (!RE_HEADER_OPENSSH.test(data[1])) { + // unencrypted, no headers + var privData = new Buffer(data.slice(1, -1).join(''), 'base64'); + if (keyType !== 'ec') { + ret.fulltype = 'ssh-' + keyType; + } else { + // ECDSA + var asnReader = new Ber.Reader(privData); + asnReader.readSequence(); + asnReader.readInt(); + asnReader.readString(Ber.OctetString, true); + asnReader.readByte(); // Skip "complex" context type byte + var offset = asnReader.readLength(); // Skip context length + if (offset !== null) { + asnReader._offset = offset; + switch (asnReader.readOID()) { + case '1.2.840.10045.3.1.7': + // prime256v1/secp256r1 + ret.fulltype = 'ecdsa-sha2-nistp256'; + break; + case '1.3.132.0.34': + // secp384r1 + 
ret.fulltype = 'ecdsa-sha2-nistp384'; + break; + case '1.3.132.0.35': + // secp521r1 + ret.fulltype = 'ecdsa-sha2-nistp521'; + break; + } + } + if (ret.fulltype === undefined) + return new Error('Unsupported EC private key type'); + } + ret.private = privData; + } else { + // possibly encrypted, headers + for (i = 1, len = data.length; i < len; ++i) { + m = RE_HEADER_OPENSSH.exec(data[i]); + if (m) { + m[1] = m[1].toLowerCase(); + if (m[1] === 'dek-info') { + m[2] = m[2].split(','); + ret.encryption = m[2][0].toLowerCase(); + if (m[2].length > 1) + ret.extra = m[2].slice(1); + } + } else if (data[i].length) + break; + } + ret.private = new Buffer(data.slice(i, -1).join(''), 'base64'); + } + ret.type = keyType; + ret.privateOrig = new Buffer(orig); + } else if (m = RE_HEADER_OPENSSH_PUB.exec(data[0])) { + // OpenSSH public key + ret.fulltype = m[1]; + ret.type = (m[2] || 'ec').toLowerCase(); + ret.public = new Buffer(m[4], 'base64'); + ret.publicOrig = new Buffer(orig); + ret.comment = m[5]; + if (m[3]) // ECDSA only + ret.curve = 'nistp' + m[3]; + } else if (RE_HEADER_RFC4716_PUB.test(data[0]) + && RE_FOOTER_RFC4716_PUB.test(data.slice(-1))) { + if (data[1].indexOf(': ') === -1) { + // no headers + ret.public = new Buffer(data.slice(1, -1).join(''), 'base64'); + } else { + // headers + for (i = 1, len = data.length; i < len; ++i) { + if (data[i].indexOf(': ') === -1) { + if (data[i].length) + break; // start of key data + else + continue; // empty line + } + while (data[i].substr(-1) === '\\') { + if (i + 1 < len) { + data[i] = data[i].slice(0, -1) + data[i + 1]; + data.splice(i + 1, 1); + --len; + } else + return new Error('RFC4716 public key missing header continuation line'); + } + m = RE_HEADER_RFC4716.exec(data[i]); + if (m) { + m[1] = m[1].toLowerCase(); + if (m[1] === 'comment') { + ret.comment = m[2] || ''; + if (ret.comment[0] === '"' && ret.comment.substr(-1) === '"') + ret.comment = ret.comment.slice(1, -1); + } + } else + return new Error('RFC4716 
public key invalid header line'); + } + ret.public = new Buffer(data.slice(i, -1).join(''), 'base64'); + } + len = ret.public.readUInt32BE(0, true); + var fulltype = ret.public.toString('ascii', 4, 4 + len); + ret.fulltype = fulltype; + if (fulltype === 'ssh-dss') + ret.type = 'dss'; + else if (fulltype === 'ssh-rsa') + ret.type = 'rsa'; + else + return new Error('Unsupported RFC4716 public key type: ' + fulltype); + ret.public = ret.public.slice(11); + ret.publicOrig = new Buffer(orig); + } else if (m = RE_PPK.exec(orig)) { + // m[1] = short type + // m[2] = encryption type + // m[3] = comment + // m[4] = base64-encoded public key data: + // for "ssh-rsa": + // string "ssh-rsa" + // mpint e (public exponent) + // mpint n (modulus) + // for "ssh-dss": + // string "ssh-dss" + // mpint p (modulus) + // mpint q (prime) + // mpint g (base number) + // mpint y (public key parameter: g^x mod p) + // m[5] = base64-encoded private key data: + // for "ssh-rsa": + // mpint d (private exponent) + // mpint p (prime 1) + // mpint q (prime 2) + // mpint iqmp ([inverse of q] mod p) + // for "ssh-dss": + // mpint x (private key parameter) + // m[6] = SHA1 HMAC over: + // string name of algorithm ("ssh-dss", "ssh-rsa") + // string encryption type + // string comment + // string public key data + // string private-plaintext (including the final padding) + + // avoid cyclic require by requiring on first use + if (!utils) + utils = require('./utils'); + + ret.ppk = true; + ret.type = m[1]; + ret.fulltype = 'ssh-' + m[1]; + if (m[2] !== 'none') + ret.encryption = m[2]; + ret.comment = m[3]; + + ret.public = new Buffer(m[4].replace(/\r?\n/g, ''), 'base64'); + var privateKey = new Buffer(m[5].replace(/\r?\n/g, ''), 'base64'); + + ret.privateMAC = m[6].replace(/\r?\n/g, ''); + + // automatically verify private key MAC if we don't need to wait for + // decryption + if (!ret.encryption) { + var valid = utils.verifyPPKMAC(ret, undefined, privateKey); + if (!valid) + throw new Error('PPK MAC 
mismatch'); + } + + // generate a PEM encoded version of the public key + var pubkey = utils.genPublicKey(ret); + ret.public = pubkey.public; + ret.publicOrig = pubkey.publicOrig; + + ret.private = privateKey; + + // automatically convert private key data to OpenSSL format (including PEM) + // if we don't need to wait for decryption + if (!ret.encryption) + utils.convertPPKPrivate(ret); + } else + return new Error('Unsupported key format'); + + return ret; +}; diff --git a/reverse_engineering/node_modules/ssh2-streams/lib/sftp.js b/reverse_engineering/node_modules/ssh2-streams/lib/sftp.js new file mode 100644 index 0000000..9e93200 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/lib/sftp.js @@ -0,0 +1,2967 @@ +// TODO: support EXTENDED request packets + +var TransformStream = require('stream').Transform; +var ReadableStream = require('stream').Readable; +var WritableStream = require('stream').Writable; +var constants = require('fs').constants || process.binding('constants'); +var util = require('util'); +var inherits = util.inherits; +var isDate = util.isDate; +var listenerCount = require('events').EventEmitter.listenerCount; +var fs = require('fs'); + +var readString = require('./utils').readString; +var readInt = require('./utils').readInt; + +var ATTR = { + SIZE: 0x00000001, + UIDGID: 0x00000002, + PERMISSIONS: 0x00000004, + ACMODTIME: 0x00000008, + EXTENDED: 0x80000000 +}; + +var STATUS_CODE = { + OK: 0, + EOF: 1, + NO_SUCH_FILE: 2, + PERMISSION_DENIED: 3, + FAILURE: 4, + BAD_MESSAGE: 5, + NO_CONNECTION: 6, + CONNECTION_LOST: 7, + OP_UNSUPPORTED: 8 +}; +Object.keys(STATUS_CODE).forEach(function(key) { + STATUS_CODE[STATUS_CODE[key]] = key; +}); +var STATUS_CODE_STR = { + 0: 'No error', + 1: 'End of file', + 2: 'No such file or directory', + 3: 'Permission denied', + 4: 'Failure', + 5: 'Bad message', + 6: 'No connection', + 7: 'Connection lost', + 8: 'Operation unsupported' +}; +SFTPStream.STATUS_CODE = STATUS_CODE; + +var REQUEST = { + INIT: 
1, + OPEN: 3, + CLOSE: 4, + READ: 5, + WRITE: 6, + LSTAT: 7, + FSTAT: 8, + SETSTAT: 9, + FSETSTAT: 10, + OPENDIR: 11, + READDIR: 12, + REMOVE: 13, + MKDIR: 14, + RMDIR: 15, + REALPATH: 16, + STAT: 17, + RENAME: 18, + READLINK: 19, + SYMLINK: 20, + EXTENDED: 200 +}; +Object.keys(REQUEST).forEach(function(key) { + REQUEST[REQUEST[key]] = key; +}); + +var RESPONSE = { + VERSION: 2, + STATUS: 101, + HANDLE: 102, + DATA: 103, + NAME: 104, + ATTRS: 105, + EXTENDED: 201 +}; +Object.keys(RESPONSE).forEach(function(key) { + RESPONSE[RESPONSE[key]] = key; +}); + +var OPEN_MODE = { + READ: 0x00000001, + WRITE: 0x00000002, + APPEND: 0x00000004, + CREAT: 0x00000008, + TRUNC: 0x00000010, + EXCL: 0x00000020 +}; +SFTPStream.OPEN_MODE = OPEN_MODE; + +var MAX_PKT_LEN = 34000; +var MAX_REQID = Math.pow(2, 32) - 1; +var CLIENT_VERSION_BUFFER = new Buffer([0, 0, 0, 5 /* length */, + REQUEST.INIT, + 0, 0, 0, 3 /* version */]); +var SERVER_VERSION_BUFFER = new Buffer([0, 0, 0, 5 /* length */, + RESPONSE.VERSION, + 0, 0, 0, 3 /* version */]); +/* + http://tools.ietf.org/html/draft-ietf-secsh-filexfer-02: + + The maximum size of a packet is in practice determined by the client + (the maximum size of read or write requests that it sends, plus a few + bytes of packet overhead). All servers SHOULD support packets of at + least 34000 bytes (where the packet size refers to the full length, + including the header above). This should allow for reads and writes + of at most 32768 bytes. + + OpenSSH caps this to 256kb instead of the ~34kb as mentioned in the sftpv3 + spec. 
+*/ +var RE_OPENSSH = /^SSH-2.0-(?:OpenSSH|dropbear)/; +var OPENSSH_MAX_DATA_LEN = (256 * 1024) - (2 * 1024)/*account for header data*/; + +function DEBUG_NOOP(msg) {} + +function SFTPStream(cfg, remoteIdentRaw) { + if (typeof cfg === 'string' && !remoteIdentRaw) { + remoteIdentRaw = cfg; + cfg = undefined; + } + if (typeof cfg !== 'object' || !cfg) + cfg = {}; + + TransformStream.call(this, { + highWaterMark: (typeof cfg.highWaterMark === 'number' + ? cfg.highWaterMark + : 32 * 1024) + }); + + this.debug = (typeof cfg.debug === 'function' ? cfg.debug : DEBUG_NOOP); + this.server = (cfg.server ? true : false); + this._isOpenSSH = (remoteIdentRaw && RE_OPENSSH.test(remoteIdentRaw)); + this._needContinue = false; + this._state = { + // common + status: 'packet_header', + writeReqid: -1, + pktLeft: undefined, + pktHdrBuf: new Buffer(9), // room for pktLen + pktType + req id + pktBuf: undefined, + pktType: undefined, + version: undefined, + extensions: {}, + + // client + maxDataLen: (this._isOpenSSH ? 
OPENSSH_MAX_DATA_LEN : 32768), + requests: {} + }; + + var self = this; + this.on('end', function() { + self.readable = false; + }).on('finish', onFinish) + .on('prefinish', onFinish); + function onFinish() { + self.writable = false; + self._cleanup(false); + } + + if (!this.server) + this.push(CLIENT_VERSION_BUFFER); +} +inherits(SFTPStream, TransformStream); + +SFTPStream.prototype.__read = TransformStream.prototype._read; +SFTPStream.prototype._read = function(n) { + if (this._needContinue) { + this._needContinue = false; + this.emit('continue'); + } + return this.__read(n); +}; +SFTPStream.prototype.__push = TransformStream.prototype.push; +SFTPStream.prototype.push = function(chunk, encoding) { + if (!this.readable) + return false; + if (chunk === null) + this.readable = false; + var ret = this.__push(chunk, encoding); + this._needContinue = (ret === false); + return ret; +}; + +SFTPStream.prototype._cleanup = function(callback) { + var state = this._state; + + state.pktBuf = undefined; // give GC something to do + + var requests = state.requests; + var keys = Object.keys(requests); + var len = keys.length; + if (len) { + if (this.readable) { + var err = new Error('SFTP session ended early'); + for (var i = 0, cb; i < len; ++i) + (cb = requests[keys[i]].cb) && cb(err); + } + state.requests = {}; + } + + if (this.readable) + this.push(null); + if (!this._readableState.endEmitted && !this._readableState.flowing) { + // Ugh! 
+ this.resume(); + } + if (callback !== false) { + this.debug('DEBUG[SFTP]: Parser: Malformed packet'); + callback && callback(new Error('Malformed packet')); + } +}; + +SFTPStream.prototype._transform = function(chunk, encoding, callback) { + var state = this._state; + var server = this.server; + var status = state.status; + var pktType = state.pktType; + var pktBuf = state.pktBuf; + var pktLeft = state.pktLeft; + var version = state.version; + var pktHdrBuf = state.pktHdrBuf; + var requests = state.requests; + var debug = this.debug; + var chunkLen = chunk.length; + var chunkPos = 0; + var buffer; + var chunkLeft; + var id; + + while (true) { + if (status === 'discard') { + chunkLeft = (chunkLen - chunkPos); + if (pktLeft <= chunkLeft) { + chunkPos += pktLeft; + pktLeft = 0; + status = 'packet_header'; + buffer = pktBuf = undefined; + } else { + pktLeft -= chunkLeft; + break; + } + } else if (pktBuf !== undefined) { + chunkLeft = (chunkLen - chunkPos); + if (pktLeft <= chunkLeft) { + chunk.copy(pktBuf, + pktBuf.length - pktLeft, + chunkPos, + chunkPos + pktLeft); + chunkPos += pktLeft; + pktLeft = 0; + buffer = pktBuf; + pktBuf = undefined; + continue; + } else { + chunk.copy(pktBuf, pktBuf.length - pktLeft, chunkPos); + pktLeft -= chunkLeft; + break; + } + } else if (status === 'packet_header') { + if (!buffer) { + pktLeft = 5; + pktBuf = pktHdrBuf; + } else { + // here we read the right-most 5 bytes from buffer (pktHdrBuf) + pktLeft = buffer.readUInt32BE(4, true) - 1; // account for type byte + pktType = buffer[8]; + + if (server) { + if (version === undefined && pktType !== REQUEST.INIT) { + debug('DEBUG[SFTP]: Parser: Unexpected packet before init'); + this._cleanup(false); + return callback(new Error('Unexpected packet before init')); + } else if (version !== undefined && pktType === REQUEST.INIT) { + debug('DEBUG[SFTP]: Parser: Unexpected duplicate init'); + status = 'bad_pkt'; + } else if (pktLeft > MAX_PKT_LEN) { + var msg = 'Packet length (' + + pktLeft 
+ + ') exceeds max length (' + + MAX_PKT_LEN + + ')'; + debug('DEBUG[SFTP]: Parser: ' + msg); + this._cleanup(false); + return callback(new Error(msg)); + } else if (pktType === REQUEST.EXTENDED) { + status = 'bad_pkt'; + } else if (REQUEST[pktType] === undefined) { + debug('DEBUG[SFTP]: Parser: Unsupported packet type: ' + pktType); + status = 'discard'; + } + } else if (version === undefined && pktType !== RESPONSE.VERSION) { + debug('DEBUG[SFTP]: Parser: Unexpected packet before version'); + this._cleanup(false); + return callback(new Error('Unexpected packet before version')); + } else if (version !== undefined && pktType === RESPONSE.VERSION) { + debug('DEBUG[SFTP]: Parser: Unexpected duplicate version'); + status = 'bad_pkt'; + } else if (RESPONSE[pktType] === undefined) { + status = 'discard'; + } + + if (status === 'bad_pkt') { + // copy original packet info + pktHdrBuf.writeUInt32BE(pktLeft, 0, true); + pktHdrBuf[4] = pktType; + + pktLeft = 4; + pktBuf = pktHdrBuf; + } else { + pktBuf = new Buffer(pktLeft); + status = 'payload'; + } + } + } else if (status === 'payload') { + if (pktType === RESPONSE.VERSION || pktType === REQUEST.INIT) { + /* + uint32 version + + */ + version = state.version = readInt(buffer, 0, this, callback); + if (version === false) + return; + if (version < 3) { + this._cleanup(false); + return callback(new Error('Incompatible SFTP version: ' + version)); + } else if (server) + this.push(SERVER_VERSION_BUFFER); + + var buflen = buffer.length; + var extname; + var extdata; + buffer._pos = 4; + while (buffer._pos < buflen) { + extname = readString(buffer, buffer._pos, 'ascii', this, callback); + if (extname === false) + return; + extdata = readString(buffer, buffer._pos, 'ascii', this, callback); + if (extdata === false) + return; + if (state.extensions[extname]) + state.extensions[extname].push(extdata); + else + state.extensions[extname] = [ extdata ]; + } + + this.emit('ready'); + } else { + /* + All other packets (client and server) 
begin with a (client) request + id: + uint32 id + */ + id = readInt(buffer, 0, this, callback); + if (id === false) + return; + + var filename; + var attrs; + var handle; + var data; + + if (!server) { + var req = requests[id]; + var cb = req && req.cb; + debug('DEBUG[SFTP]: Parser: Response: ' + RESPONSE[pktType]); + if (req && cb) { + if (pktType === RESPONSE.STATUS) { + /* + uint32 error/status code + string error message (ISO-10646 UTF-8) + string language tag + */ + var code = readInt(buffer, 4, this, callback); + if (code === false) + return; + if (code === STATUS_CODE.OK) { + cb(); + } else { + // We borrow OpenSSH behavior here, specifically we make the + // message and language fields optional, despite the + // specification requiring them (even if they are empty). This + // helps to avoid problems with buggy implementations that do + // not fully conform to the SFTP(v3) specification. + var msg; + var lang = ''; + if (buffer.length >= 12) { + msg = readString(buffer, 8, 'utf8', this, callback); + if (msg === false) + return; + if ((buffer._pos + 4) < buffer.length) { + lang = readString(buffer, + buffer._pos, + 'ascii', + this, + callback); + if (lang === false) + return; + } + } + var err = new Error(msg + || STATUS_CODE_STR[code] + || 'Unknown status'); + err.code = code; + err.lang = lang; + cb(err); + } + } else if (pktType === RESPONSE.HANDLE) { + /* + string handle + */ + handle = readString(buffer, 4, this, callback); + if (handle === false) + return; + cb(undefined, handle); + } else if (pktType === RESPONSE.DATA) { + /* + string data + */ + if (req.buffer) { + // we have already pre-allocated space to store the data + var dataLen = readInt(buffer, 4, this, callback); + if (dataLen === false) + return; + var reqBufLen = req.buffer.length; + if (dataLen > reqBufLen) { + // truncate response data to fit expected size + buffer.writeUInt32BE(reqBufLen, 4, true); + } + data = readString(buffer, 4, req.buffer, this, callback); + if (data === false) + 
return; + cb(undefined, data, dataLen); + } else { + data = readString(buffer, 4, this, callback); + if (data === false) + return; + cb(undefined, data); + } + } else if (pktType === RESPONSE.NAME) { + /* + uint32 count + repeats count times: + string filename + string longname + ATTRS attrs + */ + var namesLen = readInt(buffer, 4, this, callback); + if (namesLen === false) + return; + var names = [], + longname; + buffer._pos = 8; + for (var i = 0; i < namesLen; ++i) { + // we are going to assume UTF-8 for filenames despite the SFTPv3 + // spec not specifying an encoding because the specs for newer + // versions of the protocol all explicitly specify UTF-8 for + // filenames + filename = readString(buffer, + buffer._pos, + 'utf8', + this, + callback); + if (filename === false) + return; + // `longname` only exists in SFTPv3 and since it typically will + // contain the filename, we assume it is also UTF-8 + longname = readString(buffer, + buffer._pos, + 'utf8', + this, + callback); + if (longname === false) + return; + attrs = readAttrs(buffer, buffer._pos, this, callback); + if (attrs === false) + return; + names.push({ + filename: filename, + longname: longname, + attrs: attrs + }); + } + cb(undefined, names); + } else if (pktType === RESPONSE.ATTRS) { + /* + ATTRS attrs + */ + attrs = readAttrs(buffer, 4, this, callback); + if (attrs === false) + return; + cb(undefined, attrs); + } else if (pktType === RESPONSE.EXTENDED) { + if (req.extended) { + switch (req.extended) { + case 'statvfs@openssh.com': + case 'fstatvfs@openssh.com': + /* + uint64 f_bsize // file system block size + uint64 f_frsize // fundamental fs block size + uint64 f_blocks // number of blocks (unit f_frsize) + uint64 f_bfree // free blocks in file system + uint64 f_bavail // free blocks for non-root + uint64 f_files // total file inodes + uint64 f_ffree // free file inodes + uint64 f_favail // free file inodes for to non-root + uint64 f_fsid // file system id + uint64 f_flag // bit mask of 
f_flag values + uint64 f_namemax // maximum filename length + */ + var stats = { + f_bsize: undefined, + f_frsize: undefined, + f_blocks: undefined, + f_bfree: undefined, + f_bavail: undefined, + f_files: undefined, + f_ffree: undefined, + f_favail: undefined, + f_sid: undefined, + f_flag: undefined, + f_namemax: undefined + }; + stats.f_bsize = readUInt64BE(buffer, 4, this, callback); + if (stats.f_bsize === false) + return; + stats.f_frsize = readUInt64BE(buffer, 12, this, callback); + if (stats.f_frsize === false) + return; + stats.f_blocks = readUInt64BE(buffer, 20, this, callback); + if (stats.f_blocks === false) + return; + stats.f_bfree = readUInt64BE(buffer, 28, this, callback); + if (stats.f_bfree === false) + return; + stats.f_bavail = readUInt64BE(buffer, 36, this, callback); + if (stats.f_bavail === false) + return; + stats.f_files = readUInt64BE(buffer, 44, this, callback); + if (stats.f_files === false) + return; + stats.f_ffree = readUInt64BE(buffer, 52, this, callback); + if (stats.f_ffree === false) + return; + stats.f_favail = readUInt64BE(buffer, 60, this, callback); + if (stats.f_favail === false) + return; + stats.f_sid = readUInt64BE(buffer, 68, this, callback); + if (stats.f_sid === false) + return; + stats.f_flag = readUInt64BE(buffer, 76, this, callback); + if (stats.f_flag === false) + return; + stats.f_namemax = readUInt64BE(buffer, 84, this, callback); + if (stats.f_namemax === false) + return; + cb(undefined, stats); + break; + } + } + // XXX: at least provide the raw buffer data to the callback in + // case of unexpected extended response? 
+ cb(); + } + } + if (req) + delete requests[id]; + } else { + // server + var evName = REQUEST[pktType]; + var offset; + var path; + + debug('DEBUG[SFTP]: Parser: Request: ' + evName); + if (listenerCount(this, evName)) { + if (pktType === REQUEST.OPEN) { + /* + string filename + uint32 pflags + ATTRS attrs + */ + filename = readString(buffer, 4, 'utf8', this, callback); + if (filename === false) + return; + var pflags = readInt(buffer, buffer._pos, this, callback); + if (pflags === false) + return; + attrs = readAttrs(buffer, buffer._pos + 4, this, callback); + if (attrs === false) + return; + this.emit(evName, id, filename, pflags, attrs); + } else if (pktType === REQUEST.CLOSE + || pktType === REQUEST.FSTAT + || pktType === REQUEST.READDIR) { + /* + string handle + */ + handle = readString(buffer, 4, this, callback); + if (handle === false) + return; + this.emit(evName, id, handle); + } else if (pktType === REQUEST.READ) { + /* + string handle + uint64 offset + uint32 len + */ + handle = readString(buffer, 4, this, callback); + if (handle === false) + return; + offset = readUInt64BE(buffer, buffer._pos, this, callback); + if (offset === false) + return; + var len = readInt(buffer, buffer._pos, this, callback); + if (len === false) + return; + this.emit(evName, id, handle, offset, len); + } else if (pktType === REQUEST.WRITE) { + /* + string handle + uint64 offset + string data + */ + handle = readString(buffer, 4, this, callback); + if (handle === false) + return; + offset = readUInt64BE(buffer, buffer._pos, this, callback); + if (offset === false) + return; + data = readString(buffer, buffer._pos, this, callback); + if (data === false) + return; + this.emit(evName, id, handle, offset, data); + } else if (pktType === REQUEST.LSTAT + || pktType === REQUEST.STAT + || pktType === REQUEST.OPENDIR + || pktType === REQUEST.REMOVE + || pktType === REQUEST.RMDIR + || pktType === REQUEST.REALPATH + || pktType === REQUEST.READLINK) { + /* + string path + */ + path = 
readString(buffer, 4, 'utf8', this, callback); + if (path === false) + return; + this.emit(evName, id, path); + } else if (pktType === REQUEST.SETSTAT + || pktType === REQUEST.MKDIR) { + /* + string path + ATTRS attrs + */ + path = readString(buffer, 4, 'utf8', this, callback); + if (path === false) + return; + attrs = readAttrs(buffer, buffer._pos, this, callback); + if (attrs === false) + return; + this.emit(evName, id, path, attrs); + } else if (pktType === REQUEST.FSETSTAT) { + /* + string handle + ATTRS attrs + */ + handle = readString(buffer, 4, this, callback); + if (handle === false) + return; + attrs = readAttrs(buffer, buffer._pos, this, callback); + if (attrs === false) + return; + this.emit(evName, id, handle, attrs); + } else if (pktType === REQUEST.RENAME + || pktType === REQUEST.SYMLINK) { + /* + RENAME: + string oldpath + string newpath + SYMLINK: + string linkpath + string targetpath + */ + var str1; + var str2; + str1 = readString(buffer, 4, 'utf8', this, callback); + if (str1 === false) + return; + str2 = readString(buffer, buffer._pos, 'utf8', this, callback); + if (str2 === false) + return; + if (pktType === REQUEST.SYMLINK && this._isOpenSSH) { + // OpenSSH has linkpath and targetpath positions switched + this.emit(evName, id, str2, str1); + } else + this.emit(evName, id, str1, str2); + } + } else { + // automatically reject request if no handler for request type + this.status(id, STATUS_CODE.OP_UNSUPPORTED); + } + } + } + + // prepare for next packet + status = 'packet_header'; + buffer = pktBuf = undefined; + } else if (status === 'bad_pkt') { + if (server && buffer[4] !== REQUEST.INIT) { + var errCode = (buffer[4] === REQUEST.EXTENDED + ? STATUS_CODE.OP_UNSUPPORTED + : STATUS_CODE.FAILURE); + + // no request id for init/version packets, so we have no way to send a + // status response, so we just close up shop ... 
+ if (buffer[4] === REQUEST.INIT || buffer[4] === RESPONSE.VERSION) + return this._cleanup(callback); + + id = readInt(buffer, 5, this, callback); + if (id === false) + return; + this.status(id, errCode); + } + + // by this point we have already read the type byte and the id bytes, so + // we subtract those from the number of bytes to skip + pktLeft = buffer.readUInt32BE(0, true) - 5; + + status = 'discard'; + } + + if (chunkPos >= chunkLen) + break; + } + + state.status = status; + state.pktType = pktType; + state.pktBuf = pktBuf; + state.pktLeft = pktLeft; + state.version = version; + + callback(); +}; + +// client +SFTPStream.prototype.createReadStream = function(path, options) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + return new ReadStream(this, path, options); +}; +SFTPStream.prototype.createWriteStream = function(path, options) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + return new WriteStream(this, path, options); +}; +SFTPStream.prototype.open = function(path, flags_, attrs, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + if (typeof attrs === 'function') { + cb = attrs; + attrs = undefined; + } + + var flags = stringToFlags(flags_); + if (flags === null) + throw new Error('Unknown flags string: ' + flags_); + + var attrFlags = 0; + var attrBytes = 0; + if (typeof attrs === 'string' || typeof attrs === 'number') { + attrs = { mode: attrs }; + } + if (typeof attrs === 'object') { + attrs = attrsToBytes(attrs); + attrFlags = attrs.flags; + attrBytes = attrs.nbytes; + attrs = attrs.bytes; + } + + /* + uint32 id + string filename + uint32 pflags + ATTRS attrs + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen + 4 + 4 + attrBytes); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.OPEN; + var reqid = state.writeReqid = 
(state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + buf.writeUInt32BE(flags, p += pathlen, true); + buf.writeUInt32BE(attrFlags, p += 4, true); + if (attrs && attrFlags) { + p += 4; + for (var i = 0, len = attrs.length; i < len; ++i) + for (var j = 0, len2 = attrs[i].length; j < len2; ++j) + buf[p++] = attrs[i][j]; + } + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing OPEN'); + return this.push(buf); +}; +SFTPStream.prototype.close = function(handle, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + + var state = this._state; + + /* + uint32 id + string handle + */ + var handlelen = handle.length; + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + handlelen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.CLOSE; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(handlelen, p, true); + handle.copy(buf, p += 4); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing CLOSE'); + return this.push(buf); +}; +SFTPStream.prototype.readData = function(handle, buf, off, len, position, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + else if (!Buffer.isBuffer(buf)) + throw new Error('buffer is not a Buffer'); + else if (off >= buf.length) + throw new Error('offset is out of bounds'); + else if (off + len > buf.length) + throw new Error('length extends beyond buffer'); + else if (position === null) + throw new Error('null position currently unsupported'); + + var state = this._state; + + /* + uint32 id + string handle + uint64 offset + uint32 len + */ + var handlelen = 
handle.length; + var p = 9; + var pos = position; + var out = new Buffer(4 + 1 + 4 + 4 + handlelen + 8 + 4); + + out.writeUInt32BE(out.length - 4, 0, true); + out[4] = REQUEST.READ; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + out.writeUInt32BE(reqid, 5, true); + + out.writeUInt32BE(handlelen, p, true); + handle.copy(out, p += 4); + p += handlelen; + for (var i = 7; i >= 0; --i) { + out[p + i] = pos & 0xFF; + pos /= 256; + } + out.writeUInt32BE(len, p += 8, true); + + state.requests[reqid] = { + cb: function(err, data, nb) { + if (err && err.code !== STATUS_CODE.EOF) + return cb(err); + cb(undefined, nb || 0, data, position); + }, + buffer: buf.slice(off, off + len) + }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing READ'); + return this.push(out); +}; +SFTPStream.prototype.writeData = function(handle, buf, off, len, position, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + else if (!Buffer.isBuffer(buf)) + throw new Error('buffer is not a Buffer'); + else if (off > buf.length) + throw new Error('offset is out of bounds'); + else if (off + len > buf.length) + throw new Error('length extends beyond buffer'); + else if (position === null) + throw new Error('null position currently unsupported'); + + var self = this; + var state = this._state; + + if (!len) { + cb && process.nextTick(function() { cb(undefined, 0); }); + return; + } + + var overflow = (len > state.maxDataLen + ? 
len - state.maxDataLen + : 0); + var origPosition = position; + + if (overflow) + len = state.maxDataLen; + + /* + uint32 id + string handle + uint64 offset + string data + */ + var handlelen = handle.length; + var p = 9; + var out = new Buffer(4 + 1 + 4 + 4 + handlelen + 8 + 4 + len); + + out.writeUInt32BE(out.length - 4, 0, true); + out[4] = REQUEST.WRITE; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + out.writeUInt32BE(reqid, 5, true); + + out.writeUInt32BE(handlelen, p, true); + handle.copy(out, p += 4); + p += handlelen; + for (var i = 7; i >= 0; --i) { + out[p + i] = position & 0xFF; + position /= 256; + } + out.writeUInt32BE(len, p += 8, true); + buf.copy(out, p += 4, off, off + len); + + state.requests[reqid] = { + cb: function(err) { + if (err) + cb && cb(err); + else if (overflow) { + self.writeData(handle, + buf, + off + len, + overflow, + origPosition + len, + cb); + } else + cb && cb(undefined, off + len); + } + }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing WRITE'); + return this.push(out); +}; +function tryCreateBuffer(size) { + try { + return new Buffer(size); + } catch (ex) { + return ex; + } +} +function fastXfer(src, dst, srcPath, dstPath, opts, cb) { + var concurrency = 64; + var chunkSize = 32768; + //var preserve = false; + var onstep; + var mode; + + if (typeof opts === 'function') { + cb = opts; + } else if (typeof opts === 'object') { + if (typeof opts.concurrency === 'number' + && opts.concurrency > 0 + && !isNaN(opts.concurrency)) + concurrency = opts.concurrency; + if (typeof opts.chunkSize === 'number' + && opts.chunkSize > 0 + && !isNaN(opts.chunkSize)) + chunkSize = opts.chunkSize; + if (typeof opts.step === 'function') + onstep = opts.step; + //preserve = (opts.preserve ? 
true : false); + if (typeof opts.mode === 'string' || typeof opts.mode === 'number') + mode = modeNum(opts.mode); + } + + // internal state variables + var fsize; + var chunk; + var psrc = 0; + var pdst = 0; + var reads = 0; + var total = 0; + var hadError = false; + var srcHandle; + var dstHandle; + var readbuf; + var bufsize = chunkSize * concurrency; + + function onerror(err) { + if (hadError) + return; + + hadError = true; + + var left = 0; + var cbfinal; + + if (srcHandle || dstHandle) { + cbfinal = function() { + if (--left === 0) + cb(err); + }; + if (srcHandle && (src === fs || src.writable)) + ++left; + if (dstHandle && (dst === fs || dst.writable)) + ++left; + if (srcHandle && (src === fs || src.writable)) + src.close(srcHandle, cbfinal); + if (dstHandle && (dst === fs || dst.writable)) + dst.close(dstHandle, cbfinal); + } else + cb(err); + } + + src.open(srcPath, 'r', function(err, sourceHandle) { + if (err) + return onerror(err); + + srcHandle = sourceHandle; + + src.fstat(srcHandle, function tryStat(err, attrs) { + if (err) { + if (src !== fs) { + // Try stat() for sftp servers that may not support fstat() for + // whatever reason + src.stat(srcPath, function(err_, attrs_) { + if (err_) + return onerror(err); + tryStat(null, attrs_); + }); + return; + } + return onerror(err); + } + fsize = attrs.size; + + dst.open(dstPath, 'w', function(err, destHandle) { + if (err) + return onerror(err); + + dstHandle = destHandle; + + if (fsize <= 0) + return onerror(); + + // Use less memory where possible + while (bufsize > fsize) { + if (concurrency === 1) { + bufsize = fsize; + break; + } + bufsize -= chunkSize; + --concurrency; + } + + readbuf = tryCreateBuffer(bufsize); + if (readbuf instanceof Error) + return onerror(readbuf); + + if (mode !== undefined) { + dst.fchmod(dstHandle, mode, function tryAgain(err) { + if (err) { + // Try chmod() for sftp servers that may not support fchmod() for + // whatever reason + dst.chmod(dstPath, mode, function(err_) { + 
tryAgain(); + }); + return; + } + read(); + }); + } else { + read(); + } + + function onread(err, nb, data, dstpos, datapos) { + if (err) + return onerror(err); + + if (src === fs) + dst.writeData(dstHandle, data, datapos || 0, nb, dstpos, writeCb); + else + dst.write(dstHandle, data, datapos || 0, nb, dstpos, writeCb); + + function writeCb(err) { + if (err) + return onerror(err); + + total += nb; + onstep && onstep(total, nb, fsize); + + if (--reads === 0) { + if (total === fsize) { + dst.close(dstHandle, function(err) { + dstHandle = undefined; + if (err) + return onerror(err); + src.close(srcHandle, function(err) { + srcHandle = undefined; + if (err) + return onerror(err); + cb(); + }); + }); + } else + read(); + } + } + } + + function makeCb(psrc, pdst) { + return function(err, nb, data) { + onread(err, nb, data, pdst, psrc); + }; + } + + function read() { + while (pdst < fsize && reads < concurrency) { + chunk = (pdst + chunkSize > fsize ? fsize - pdst : chunkSize); + if (src === fs) { + src.read(srcHandle, + readbuf, + psrc, + chunk, + pdst, + makeCb(psrc, pdst)); + } else + src.readData(srcHandle, readbuf, psrc, chunk, pdst, onread); + psrc += chunk; + pdst += chunk; + ++reads; + } + psrc = 0; + } + }); + }); + }); +} +SFTPStream.prototype.fastGet = function(remotePath, localPath, opts, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + fastXfer(this, fs, remotePath, localPath, opts, cb); +}; +SFTPStream.prototype.fastPut = function(localPath, remotePath, opts, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + fastXfer(fs, this, localPath, remotePath, opts, cb); +}; +SFTPStream.prototype.readFile = function(path, options, callback_) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var callback; + if (typeof callback_ === 'function') { + callback = callback_; + } else if (typeof options === 'function') { + callback = options; + options = 
undefined; + } + + var self = this; + + if (typeof options === 'string') + options = { encoding: options, flag: 'r' }; + else if (!options) + options = { encoding: null, flag: 'r' }; + else if (typeof options !== 'object') + throw new TypeError('Bad arguments'); + + var encoding = options.encoding; + if (encoding && !Buffer.isEncoding(encoding)) + throw new Error('Unknown encoding: ' + encoding); + + // first, stat the file, so we know the size. + var size; + var buffer; // single buffer with file data + var buffers; // list for when size is unknown + var pos = 0; + var handle; + + // SFTPv3 does not support using -1 for read position, so we have to track + // read position manually + var bytesRead = 0; + + var flag = options.flag || 'r'; + this.open(path, flag, 438 /*=0666*/, function(er, handle_) { + if (er) + return callback && callback(er); + handle = handle_; + + self.fstat(handle, function tryStat(er, st) { + if (er) { + // Try stat() for sftp servers that may not support fstat() for + // whatever reason + self.stat(path, function(er_, st_) { + if (er_) { + return self.close(handle, function() { + callback && callback(er); + }); + } + tryStat(null, st_); + }); + return; + } + + size = st.size || 0; + if (size === 0) { + // the kernel lies about many files. + // Go ahead and try to read some bytes. + buffers = []; + return read(); + } + + buffer = new Buffer(size); + read(); + }); + }); + + function read() { + if (size === 0) { + buffer = new Buffer(8192); + self.readData(handle, buffer, 0, 8192, bytesRead, afterRead); + } else + self.readData(handle, buffer, pos, size - pos, bytesRead, afterRead); + } + + function afterRead(er, nbytes) { + if (er) { + return self.close(handle, function() { + return callback && callback(er); + }); + } + + if (nbytes === 0) + return close(); + + bytesRead += nbytes; + pos += nbytes; + if (size !== 0) { + if (pos === size) + close(); + else + read(); + } else { + // unknown size, just read until we don't get bytes. 
+ buffers.push(buffer.slice(0, nbytes)); + read(); + } + } + + function close() { + self.close(handle, function(er) { + if (size === 0) { + // collected the data into the buffers list. + buffer = Buffer.concat(buffers, pos); + } else if (pos < size) + buffer = buffer.slice(0, pos); + + if (encoding) + buffer = buffer.toString(encoding); + return callback && callback(er, buffer); + }); + } +}; +function writeAll(self, handle, buffer, offset, length, position, callback_) { + var callback = (typeof callback_ === 'function' ? callback_ : undefined); + + self.writeData(handle, + buffer, + offset, + length, + position, + function(writeErr, written) { + if (writeErr) { + return self.close(handle, function() { + callback && callback(writeErr); + }); + } + if (written === length) + self.close(handle, callback); + else { + offset += written; + length -= written; + position += written; + writeAll(self, handle, buffer, offset, length, position, callback); + } + }); +} +SFTPStream.prototype.writeFile = function(path, data, options, callback_) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var callback; + if (typeof callback_ === 'function') { + callback = callback_; + } else if (typeof options === 'function') { + callback = options; + options = undefined; + } + var self = this; + + if (typeof options === 'string') + options = { encoding: options, mode: 438, flag: 'w' }; + else if (!options) + options = { encoding: 'utf8', mode: 438 /*=0666*/, flag: 'w' }; + else if (typeof options !== 'object') + throw new TypeError('Bad arguments'); + + if (options.encoding && !Buffer.isEncoding(options.encoding)) + throw new Error('Unknown encoding: ' + options.encoding); + + var flag = options.flag || 'w'; + this.open(path, flag, options.mode, function(openErr, handle) { + if (openErr) + callback && callback(openErr); + else { + var buffer = (Buffer.isBuffer(data) + ? 
data + : new Buffer('' + data, options.encoding || 'utf8')); + var position = (/a/.test(flag) ? null : 0); + + // SFTPv3 does not support the notion of 'current position' + // (null position), so we just attempt to append to the end of the file + // instead + if (position === null) { + self.fstat(handle, function tryStat(er, st) { + if (er) { + // Try stat() for sftp servers that may not support fstat() for + // whatever reason + self.stat(path, function(er_, st_) { + if (er_) { + return self.close(handle, function() { + callback && callback(er); + }); + } + tryStat(null, st_); + }); + return; + } + writeAll(self, handle, buffer, 0, buffer.length, st.size, callback); + }); + return; + } + writeAll(self, handle, buffer, 0, buffer.length, position, callback); + } + }); +}; +SFTPStream.prototype.appendFile = function(path, data, options, callback_) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var callback; + if (typeof callback_ === 'function') { + callback = callback_; + } else if (typeof options === 'function') { + callback = options; + options = undefined; + } + + if (typeof options === 'string') + options = { encoding: options, mode: 438, flag: 'a' }; + else if (!options) + options = { encoding: 'utf8', mode: 438 /*=0666*/, flag: 'a' }; + else if (typeof options !== 'object') + throw new TypeError('Bad arguments'); + + if (!options.flag) + options = util._extend({ flag: 'a' }, options); + this.writeFile(path, data, options, callback); +}; +SFTPStream.prototype.exists = function(path, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + this.stat(path, function(err) { + cb && cb(err ? 
false : true); + }); +}; +SFTPStream.prototype.unlink = function(filename, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string filename + */ + var fnamelen = Buffer.byteLength(filename); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + fnamelen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.REMOVE; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(fnamelen, p, true); + buf.write(filename, p += 4, fnamelen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing REMOVE'); + return this.push(buf); +}; +SFTPStream.prototype.rename = function(oldPath, newPath, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string oldpath + string newpath + */ + var oldlen = Buffer.byteLength(oldPath); + var newlen = Buffer.byteLength(newPath); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + oldlen + 4 + newlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.RENAME; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(oldlen, p, true); + buf.write(oldPath, p += 4, oldlen, 'utf8'); + buf.writeUInt32BE(newlen, p += oldlen, true); + buf.write(newPath, p += 4, newlen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing RENAME'); + return this.push(buf); +}; +SFTPStream.prototype.mkdir = function(path, attrs, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var flags = 0; + var attrBytes = 0; + var state = this._state; + + if (typeof attrs === 'function') { + cb = attrs; + attrs = undefined; + } + if (typeof attrs === 'object') { + attrs = attrsToBytes(attrs); + flags = attrs.flags; + 
attrBytes = attrs.nbytes; + attrs = attrs.bytes; + } + + /* + uint32 id + string path + ATTRS attrs + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen + 4 + attrBytes); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.MKDIR; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + buf.writeUInt32BE(flags, p += pathlen); + if (flags) { + p += 4; + for (var i = 0, len = attrs.length; i < len; ++i) + for (var j = 0, len2 = attrs[i].length; j < len2; ++j) + buf[p++] = attrs[i][j]; + } + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing MKDIR'); + return this.push(buf); +}; +SFTPStream.prototype.rmdir = function(path, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string path + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.RMDIR; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing RMDIR'); + return this.push(buf); +}; +SFTPStream.prototype.readdir = function(where, opts, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + var doFilter; + + if (typeof opts === 'function') { + cb = opts; + opts = {}; + } + if (typeof opts !== 'object') + opts = {}; + + doFilter = (opts && opts.full ? 
false : true); + + if (!Buffer.isBuffer(where) && typeof where !== 'string') + throw new Error('missing directory handle or path'); + + if (typeof where === 'string') { + var self = this; + var entries = []; + var e = 0; + + return this.opendir(where, function reread(err, handle) { + if (err) + return cb(err); + + self.readdir(handle, opts, function(err, list) { + var eof = (err && err.code === STATUS_CODE.EOF); + + if (err && !eof) { + return self.close(handle, function() { + cb(err); + }); + } else if (eof) { + return self.close(handle, function(err) { + if (err) + return cb(err); + cb(undefined, entries); + }); + } + + for (var i = 0, len = list.length; i < len; ++i, ++e) + entries[e] = list[i]; + + reread(undefined, handle); + }); + }); + } + + /* + uint32 id + string handle + */ + var handlelen = where.length; + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + handlelen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.READDIR; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(handlelen, p, true); + where.copy(buf, p += 4); + + state.requests[reqid] = { + cb: (doFilter + ? function(err, list) { + if (err) + return cb(err); + + for (var i = list.length - 1; i >= 0; --i) { + if (list[i].filename === '.' 
|| list[i].filename === '..') + list.splice(i, 1); + } + + cb(undefined, list); + } + : cb) + }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing READDIR'); + return this.push(buf); +}; +SFTPStream.prototype.fstat = function(handle, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + + var state = this._state; + + /* + uint32 id + string handle + */ + var handlelen = handle.length; + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + handlelen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.FSTAT; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(handlelen, p, true); + handle.copy(buf, p += 4); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing FSTAT'); + return this.push(buf); +}; +SFTPStream.prototype.stat = function(path, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string path + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.STAT; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing STAT'); + return this.push(buf); +}; +SFTPStream.prototype.lstat = function(path, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string path + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = 
REQUEST.LSTAT; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing LSTAT'); + return this.push(buf); +}; +SFTPStream.prototype.opendir = function(path, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string path + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.OPENDIR; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing OPENDIR'); + return this.push(buf); +}; +SFTPStream.prototype.setstat = function(path, attrs, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var flags = 0; + var attrBytes = 0; + var state = this._state; + + if (typeof attrs === 'object') { + attrs = attrsToBytes(attrs); + flags = attrs.flags; + attrBytes = attrs.nbytes; + attrs = attrs.bytes; + } else if (typeof attrs === 'function') + cb = attrs; + + /* + uint32 id + string path + ATTRS attrs + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen + 4 + attrBytes); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.SETSTAT; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + buf.writeUInt32BE(flags, p += pathlen); + if (flags) { + p += 4; + for (var i = 0, len = attrs.length; i < len; ++i) + for 
(var j = 0, len2 = attrs[i].length; j < len2; ++j) + buf[p++] = attrs[i][j]; + } + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing SETSTAT'); + return this.push(buf); +}; +SFTPStream.prototype.fsetstat = function(handle, attrs, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + + var flags = 0; + var attrBytes = 0; + var state = this._state; + + if (typeof attrs === 'object') { + attrs = attrsToBytes(attrs); + flags = attrs.flags; + attrBytes = attrs.nbytes; + attrs = attrs.bytes; + } else if (typeof attrs === 'function') + cb = attrs; + + /* + uint32 id + string handle + ATTRS attrs + */ + var handlelen = handle.length; + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + handlelen + 4 + attrBytes); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.FSETSTAT; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(handlelen, p, true); + handle.copy(buf, p += 4); + buf.writeUInt32BE(flags, p += handlelen); + if (flags) { + p += 4; + for (var i = 0, len = attrs.length; i < len; ++i) + for (var j = 0, len2 = attrs[i].length; j < len2; ++j) + buf[p++] = attrs[i][j]; + } + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing FSETSTAT'); + return this.push(buf); +}; +SFTPStream.prototype.futimes = function(handle, atime, mtime, cb) { + return this.fsetstat(handle, { + atime: toUnixTimestamp(atime), + mtime: toUnixTimestamp(mtime) + }, cb); +}; +SFTPStream.prototype.utimes = function(path, atime, mtime, cb) { + return this.setstat(path, { + atime: toUnixTimestamp(atime), + mtime: toUnixTimestamp(mtime) + }, cb); +}; +SFTPStream.prototype.fchown = function(handle, uid, gid, cb) { + return this.fsetstat(handle, { + uid: uid, + gid: gid + }, cb); +}; +SFTPStream.prototype.chown = function(path, uid, 
gid, cb) { + return this.setstat(path, { + uid: uid, + gid: gid + }, cb); +}; +SFTPStream.prototype.fchmod = function(handle, mode, cb) { + return this.fsetstat(handle, { + mode: mode + }, cb); +}; +SFTPStream.prototype.chmod = function(path, mode, cb) { + return this.setstat(path, { + mode: mode + }, cb); +}; +SFTPStream.prototype.readlink = function(path, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string path + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.READLINK; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + + state.requests[reqid] = { + cb: function(err, names) { + if (err) + return cb(err); + else if (!names || !names.length) + return cb(new Error('Response missing link info')); + cb(undefined, names[0].filename); + } + }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing READLINK'); + return this.push(buf); +}; +SFTPStream.prototype.symlink = function(targetPath, linkPath, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string linkpath + string targetpath + */ + var linklen = Buffer.byteLength(linkPath); + var targetlen = Buffer.byteLength(targetPath); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + linklen + 4 + targetlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.SYMLINK; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + if (this._isOpenSSH) { + // OpenSSH has linkpath and targetpath positions switched + buf.writeUInt32BE(targetlen, p, true); + buf.write(targetPath, p += 4, targetlen, 'utf8'); + 
buf.writeUInt32BE(linklen, p += targetlen, true); + buf.write(linkPath, p += 4, linklen, 'utf8'); + } else { + buf.writeUInt32BE(linklen, p, true); + buf.write(linkPath, p += 4, linklen, 'utf8'); + buf.writeUInt32BE(targetlen, p += linklen, true); + buf.write(targetPath, p += 4, targetlen, 'utf8'); + } + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing SYMLINK'); + return this.push(buf); +}; +SFTPStream.prototype.realpath = function(path, cb) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var state = this._state; + + /* + uint32 id + string path + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + pathlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.REALPATH; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(path, p += 4, pathlen, 'utf8'); + + state.requests[reqid] = { + cb: function(err, names) { + if (err) + return cb(err); + else if (!names || !names.length) + return cb(new Error('Response missing path info')); + cb(undefined, names[0].filename); + } + }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing REALPATH'); + return this.push(buf); +}; +// extended requests +SFTPStream.prototype.ext_openssh_rename = function(oldPath, newPath, cb) { + var state = this._state; + + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!state.extensions['posix-rename@openssh.com'] + || state.extensions['posix-rename@openssh.com'].indexOf('1') === -1) + throw new Error('Server does not support this extended request'); + + /* + uint32 id + string "posix-rename@openssh.com" + string oldpath + string newpath + */ + var oldlen = Buffer.byteLength(oldPath); + var newlen = Buffer.byteLength(newPath); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + 24 + 4 + oldlen + 4 + newlen); + + 
buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.EXTENDED; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + buf.writeUInt32BE(24, p, true); + buf.write('posix-rename@openssh.com', p += 4, 24, 'ascii'); + + buf.writeUInt32BE(oldlen, p += 24, true); + buf.write(oldPath, p += 4, oldlen, 'utf8'); + buf.writeUInt32BE(newlen, p += oldlen, true); + buf.write(newPath, p += 4, newlen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing posix-rename@openssh.com'); + return this.push(buf); +}; +SFTPStream.prototype.ext_openssh_statvfs = function(path, cb) { + var state = this._state; + + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!state.extensions['statvfs@openssh.com'] + || state.extensions['statvfs@openssh.com'].indexOf('2') === -1) + throw new Error('Server does not support this extended request'); + + /* + uint32 id + string "statvfs@openssh.com" + string path + */ + var pathlen = Buffer.byteLength(path); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + 19 + 4 + pathlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.EXTENDED; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + buf.writeUInt32BE(19, p, true); + buf.write('statvfs@openssh.com', p += 4, 19, 'ascii'); + + buf.writeUInt32BE(pathlen, p += 19, true); + buf.write(path, p += 4, pathlen, 'utf8'); + + state.requests[reqid] = { + extended: 'statvfs@openssh.com', + cb: cb + }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing statvfs@openssh.com'); + return this.push(buf); +}; +SFTPStream.prototype.ext_openssh_fstatvfs = function(handle, cb) { + var state = this._state; + + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!state.extensions['fstatvfs@openssh.com'] + || state.extensions['fstatvfs@openssh.com'].indexOf('2') === -1) + throw 
new Error('Server does not support this extended request'); + else if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + + /* + uint32 id + string "fstatvfs@openssh.com" + string handle + */ + var handlelen = handle.length; + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + 20 + 4 + handlelen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.EXTENDED; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + buf.writeUInt32BE(20, p, true); + buf.write('fstatvfs@openssh.com', p += 4, 20, 'ascii'); + + buf.writeUInt32BE(handlelen, p += 20, true); + buf.write(handle, p += 4, handlelen, 'utf8'); + + state.requests[reqid] = { + extended: 'fstatvfs@openssh.com', + cb: cb + }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing fstatvfs@openssh.com'); + return this.push(buf); +}; +SFTPStream.prototype.ext_openssh_hardlink = function(oldPath, newPath, cb) { + var state = this._state; + + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!state.extensions['hardlink@openssh.com'] + || state.extensions['hardlink@openssh.com'].indexOf('1') === -1) + throw new Error('Server does not support this extended request'); + + /* + uint32 id + string "hardlink@openssh.com" + string oldpath + string newpath + */ + var oldlen = Buffer.byteLength(oldPath); + var newlen = Buffer.byteLength(newPath); + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + 20 + 4 + oldlen + 4 + newlen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.EXTENDED; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + buf.writeUInt32BE(20, p, true); + buf.write('hardlink@openssh.com', p += 4, 20, 'ascii'); + + buf.writeUInt32BE(oldlen, p += 20, true); + buf.write(oldPath, p += 4, oldlen, 'utf8'); + buf.writeUInt32BE(newlen, p += oldlen, true); + buf.write(newPath, p += 4, newlen, 'utf8'); + + state.requests[reqid] = { 
cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing hardlink@openssh.com'); + return this.push(buf); +}; +SFTPStream.prototype.ext_openssh_fsync = function(handle, cb) { + var state = this._state; + + if (this.server) + throw new Error('Client-only method called in server mode'); + else if (!state.extensions['fsync@openssh.com'] + || state.extensions['fsync@openssh.com'].indexOf('1') === -1) + throw new Error('Server does not support this extended request'); + else if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + + /* + uint32 id + string "fsync@openssh.com" + string handle + */ + var handlelen = handle.length; + var p = 9; + var buf = new Buffer(4 + 1 + 4 + 4 + 17 + 4 + handlelen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = REQUEST.EXTENDED; + var reqid = state.writeReqid = (state.writeReqid + 1) % MAX_REQID; + buf.writeUInt32BE(reqid, 5, true); + buf.writeUInt32BE(17, p, true); + buf.write('fsync@openssh.com', p += 4, 17, 'ascii'); + + buf.writeUInt32BE(handlelen, p += 17, true); + buf.write(handle, p += 4, handlelen, 'utf8'); + + state.requests[reqid] = { cb: cb }; + + this.debug('DEBUG[SFTP]: Outgoing: Writing fsync@openssh.com'); + return this.push(buf); +}; + +// server +SFTPStream.prototype.status = function(id, code, message, lang) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + if (!STATUS_CODE[code] || typeof code !== 'number') + throw new Error('Bad status code: ' + code); + + message || (message = ''); + lang || (lang = ''); + + var msgLen = Buffer.byteLength(message); + var langLen = Buffer.byteLength(lang); + var buf = new Buffer(4 + 1 + 4 + 4 + 4 + msgLen + 4 + langLen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = RESPONSE.STATUS; + buf.writeUInt32BE(id, 5, true); + + buf.writeUInt32BE(code, 9, true); + + buf.writeUInt32BE(msgLen, 13, true); + if (msgLen) + buf.write(message, 17, msgLen, 'utf8'); + + buf.writeUInt32BE(langLen, 17 + msgLen, true); 
+ if (langLen) + buf.write(lang, 17 + msgLen + 4, langLen, 'ascii'); + + this.debug('DEBUG[SFTP]: Outgoing: Writing STATUS'); + return this.push(buf); +}; +SFTPStream.prototype.handle = function(id, handle) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + if (!Buffer.isBuffer(handle)) + throw new Error('handle is not a Buffer'); + + var handleLen = handle.length; + + if (handleLen > 256) + throw new Error('handle too large (> 256 bytes)'); + + var buf = new Buffer(4 + 1 + 4 + 4 + handleLen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = RESPONSE.HANDLE; + buf.writeUInt32BE(id, 5, true); + + buf.writeUInt32BE(handleLen, 9, true); + if (handleLen) + handle.copy(buf, 13); + + this.debug('DEBUG[SFTP]: Outgoing: Writing HANDLE'); + return this.push(buf); +}; +SFTPStream.prototype.data = function(id, data, encoding) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var isBuffer = Buffer.isBuffer(data); + + if (!isBuffer && typeof data !== 'string') + throw new Error('data is not a Buffer or string'); + + if (!isBuffer) + encoding || (encoding = 'utf8'); + + var dataLen = (isBuffer ? 
data.length : Buffer.byteLength(data, encoding)); + var buf = new Buffer(4 + 1 + 4 + 4 + dataLen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = RESPONSE.DATA; + buf.writeUInt32BE(id, 5, true); + + buf.writeUInt32BE(dataLen, 9, true); + if (dataLen) { + if (isBuffer) + data.copy(buf, 13); + else + buf.write(data, 13, dataLen, encoding); + } + + this.debug('DEBUG[SFTP]: Outgoing: Writing DATA'); + return this.push(buf); +}; +SFTPStream.prototype.name = function(id, names) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + if (!Array.isArray(names) && typeof names === 'object') + names = [ names ]; + else if (!Array.isArray(names)) + throw new Error('names is not an object or array'); + + var count = names.length; + var namesLen = 0; + var nameAttrs; + var attrs = []; + var name; + var filename; + var longname; + var attr; + var len; + var len2; + var buf; + var p; + var i; + var j; + var k; + + for (i = 0; i < count; ++i) { + name = names[i]; + filename = (!name || !name.filename || typeof name.filename !== 'string' + ? '' + : name.filename); + namesLen += 4 + Buffer.byteLength(filename); + longname = (!name || !name.longname || typeof name.longname !== 'string' + ? '' + : name.longname); + namesLen += 4 + Buffer.byteLength(longname); + + if (typeof name.attrs === 'object') { + nameAttrs = attrsToBytes(name.attrs); + namesLen += 4 + nameAttrs.nbytes; + attrs.push(nameAttrs); + } else { + namesLen += 4; + attrs.push(null); + } + } + + buf = new Buffer(4 + 1 + 4 + 4 + namesLen); + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = RESPONSE.NAME; + buf.writeUInt32BE(id, 5, true); + + buf.writeUInt32BE(count, 9, true); + + p = 13; + + for (i = 0; i < count; ++i) { + name = names[i]; + + filename = (!name || !name.filename || typeof name.filename !== 'string' + ? 
'' + : name.filename); + len = Buffer.byteLength(filename); + buf.writeUInt32BE(len, p, true); + p += 4; + if (len) { + buf.write(filename, p, len, 'utf8'); + p += len; + } + + longname = (!name || !name.longname || typeof name.longname !== 'string' + ? '' + : name.longname); + len = Buffer.byteLength(longname); + buf.writeUInt32BE(len, p, true); + p += 4; + if (len) { + buf.write(longname, p, len, 'utf8'); + p += len; + } + + attr = attrs[i]; + if (attr) { + buf.writeUInt32BE(attr.flags, p, true); + p += 4; + if (attr.flags && attr.bytes) { + var bytes = attr.bytes; + for (j = 0, len = bytes.length; j < len; ++j) + for (k = 0, len2 = bytes[j].length; k < len2; ++k) + buf[p++] = bytes[j][k]; + } + } else { + buf.writeUInt32BE(0, p, true); + p += 4; + } + } + + this.debug('DEBUG[SFTP]: Outgoing: Writing NAME'); + return this.push(buf); +}; +SFTPStream.prototype.attrs = function(id, attrs) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + if (typeof attrs !== 'object') + throw new Error('attrs is not an object'); + + var info = attrsToBytes(attrs); + var buf = new Buffer(4 + 1 + 4 + 4 + info.nbytes); + var p = 13; + + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = RESPONSE.ATTRS; + buf.writeUInt32BE(id, 5, true); + + buf.writeUInt32BE(info.flags, 9, true); + + if (info.flags && info.bytes) { + var bytes = info.bytes; + for (var j = 0, len = bytes.length; j < len; ++j) + for (var k = 0, len2 = bytes[j].length; k < len2; ++k) + buf[p++] = bytes[j][k]; + } + + this.debug('DEBUG[SFTP]: Outgoing: Writing ATTRS'); + return this.push(buf); +}; + +function readAttrs(buf, p, stream, callback) { + /* + uint32 flags + uint64 size present only if flag SSH_FILEXFER_ATTR_SIZE + uint32 uid present only if flag SSH_FILEXFER_ATTR_UIDGID + uint32 gid present only if flag SSH_FILEXFER_ATTR_UIDGID + uint32 permissions present only if flag SSH_FILEXFER_ATTR_PERMISSIONS + uint32 atime present only if flag SSH_FILEXFER_ACMODTIME + uint32 mtime 
present only if flag SSH_FILEXFER_ACMODTIME + uint32 extended_count present only if flag SSH_FILEXFER_ATTR_EXTENDED + string extended_type + string extended_data + ... more extended data (extended_type - extended_data pairs), + so that number of pairs equals extended_count + */ + var flags = buf.readUInt32BE(p, true); + var attrs = new Stats(); + + p += 4; + + if (flags & ATTR.SIZE) { + var size = readUInt64BE(buf, p, stream, callback); + if (size === false) + return false; + attrs.size = size; + p += 8; + } + if (flags & ATTR.UIDGID) { + var uid; + var gid; + uid = readInt(buf, p, this, callback); + if (uid === false) + return false; + attrs.uid = uid; + p += 4; + gid = readInt(buf, p, this, callback); + if (gid === false) + return false; + attrs.gid = gid; + p += 4; + } + if (flags & ATTR.PERMISSIONS) { + var mode = readInt(buf, p, this, callback); + if (mode === false) + return false; + attrs.mode = mode; + // backwards compatibility + attrs.permissions = mode; + p += 4; + } + if (flags & ATTR.ACMODTIME) { + var atime; + var mtime; + atime = readInt(buf, p, this, callback); + if (atime === false) + return false; + attrs.atime = atime; + p += 4; + mtime = readInt(buf, p, this, callback); + if (mtime === false) + return false; + attrs.mtime = mtime; + p += 4; + } + if (flags & ATTR.EXTENDED) { + // TODO: read/parse extended data + var extcount = readInt(buf, p, this, callback); + if (extcount === false) + return false; + p += 4; + for (var i = 0, len; i < extcount; ++i) { + len = readInt(buf, p, this, callback); + if (len === false) + return false; + p += 4 + len; + } + } + + buf._pos = p; + + return attrs; +} + +function readUInt64BE(buffer, p, stream, callback) { + if ((buffer.length - p) < 8) { + stream && stream._cleanup(callback); + return false; + } + + var val = 0; + + for (var len = p + 8; p < len; ++p) { + val *= 256; + val += buffer[p]; + } + + buffer._pos = p; + + return val; +} + +function attrsToBytes(attrs) { + var flags = 0; + var attrBytes = 0; + 
var ret = []; + var i = 0; + + if (typeof attrs.size === 'number') { + flags |= ATTR.SIZE; + attrBytes += 8; + var sizeBytes = new Array(8); + var val = attrs.size; + for (i = 7; i >= 0; --i) { + sizeBytes[i] = val & 0xFF; + val /= 256; + } + ret.push(sizeBytes); + } + if (typeof attrs.uid === 'number' && typeof attrs.gid === 'number') { + flags |= ATTR.UIDGID; + attrBytes += 8; + ret.push([(attrs.uid >> 24) & 0xFF, (attrs.uid >> 16) & 0xFF, + (attrs.uid >> 8) & 0xFF, attrs.uid & 0xFF]); + ret.push([(attrs.gid >> 24) & 0xFF, (attrs.gid >> 16) & 0xFF, + (attrs.gid >> 8) & 0xFF, attrs.gid & 0xFF]); + } + if (typeof attrs.permissions === 'number' + || typeof attrs.permissions === 'string' + || typeof attrs.mode === 'number' + || typeof attrs.mode === 'string') { + var mode = modeNum(attrs.mode || attrs.permissions); + flags |= ATTR.PERMISSIONS; + attrBytes += 4; + ret.push([(mode >> 24) & 0xFF, + (mode >> 16) & 0xFF, + (mode >> 8) & 0xFF, + mode & 0xFF]); + } + if ((typeof attrs.atime === 'number' || isDate(attrs.atime)) + && (typeof attrs.mtime === 'number' || isDate(attrs.mtime))) { + var atime = toUnixTimestamp(attrs.atime); + var mtime = toUnixTimestamp(attrs.mtime); + + flags |= ATTR.ACMODTIME; + attrBytes += 8; + ret.push([(atime >> 24) & 0xFF, (atime >> 16) & 0xFF, + (atime >> 8) & 0xFF, atime & 0xFF]); + ret.push([(mtime >> 24) & 0xFF, (mtime >> 16) & 0xFF, + (mtime >> 8) & 0xFF, mtime & 0xFF]); + } + // TODO: extended attributes + + return { flags: flags, nbytes: attrBytes, bytes: ret }; +} + +function toUnixTimestamp(time) { + if (typeof time === 'number' && !isNaN(time)) + return time; + else if (isDate(time)) + return parseInt(time.getTime() / 1000, 10); + throw new Error('Cannot parse time: ' + time); +} + +function modeNum(mode) { + if (typeof mode === 'number' && !isNaN(mode)) + return mode; + else if (typeof mode === 'string') + return modeNum(parseInt(mode, 8)); + throw new Error('Cannot parse mode: ' + mode); +} + +var stringFlagMap = { + 'r': 
OPEN_MODE.READ, + 'r+': OPEN_MODE.READ | OPEN_MODE.WRITE, + 'w': OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.WRITE, + 'wx': OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.WRITE | OPEN_MODE.EXCL, + 'xw': OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.WRITE | OPEN_MODE.EXCL, + 'w+': OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.READ | OPEN_MODE.WRITE, + 'wx+': OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.READ | OPEN_MODE.WRITE + | OPEN_MODE.EXCL, + 'xw+': OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.READ | OPEN_MODE.WRITE + | OPEN_MODE.EXCL, + 'a': OPEN_MODE.APPEND | OPEN_MODE.CREAT | OPEN_MODE.WRITE, + 'ax': OPEN_MODE.APPEND | OPEN_MODE.CREAT | OPEN_MODE.WRITE | OPEN_MODE.EXCL, + 'xa': OPEN_MODE.APPEND | OPEN_MODE.CREAT | OPEN_MODE.WRITE | OPEN_MODE.EXCL, + 'a+': OPEN_MODE.APPEND | OPEN_MODE.CREAT | OPEN_MODE.READ | OPEN_MODE.WRITE, + 'ax+': OPEN_MODE.APPEND | OPEN_MODE.CREAT | OPEN_MODE.READ | OPEN_MODE.WRITE + | OPEN_MODE.EXCL, + 'xa+': OPEN_MODE.APPEND | OPEN_MODE.CREAT | OPEN_MODE.READ | OPEN_MODE.WRITE + | OPEN_MODE.EXCL +}; +var stringFlagMapKeys = Object.keys(stringFlagMap); + +function stringToFlags(str) { + var flags = stringFlagMap[str]; + if (flags !== undefined) + return flags; + return null; +} +SFTPStream.stringToFlags = stringToFlags; + +function flagsToString(flags) { + for (var i = 0; i < stringFlagMapKeys.length; ++i) { + var key = stringFlagMapKeys[i]; + if (stringFlagMap[key] === flags) + return key; + } + return null; +} +SFTPStream.flagsToString = flagsToString; + +function Stats(initial) { + this.mode = (initial && initial.mode); + this.permissions = this.mode; // backwards compatiblity + this.uid = (initial && initial.uid); + this.gid = (initial && initial.gid); + this.size = (initial && initial.size); + this.atime = (initial && initial.atime); + this.mtime = (initial && initial.mtime); +} +Stats.prototype._checkModeProperty = function(property) { + return ((this.mode & constants.S_IFMT) === property); +}; +Stats.prototype.isDirectory = 
function() { + return this._checkModeProperty(constants.S_IFDIR); +}; +Stats.prototype.isFile = function() { + return this._checkModeProperty(constants.S_IFREG); +}; +Stats.prototype.isBlockDevice = function() { + return this._checkModeProperty(constants.S_IFBLK); +}; +Stats.prototype.isCharacterDevice = function() { + return this._checkModeProperty(constants.S_IFCHR); +}; +Stats.prototype.isSymbolicLink = function() { + return this._checkModeProperty(constants.S_IFLNK); +}; +Stats.prototype.isFIFO = function() { + return this._checkModeProperty(constants.S_IFIFO); +}; +Stats.prototype.isSocket = function() { + return this._checkModeProperty(constants.S_IFSOCK); +}; +SFTPStream.Stats = Stats; + + +// ReadStream-related +var kMinPoolSpace = 128; +var pool; +function allocNewPool(poolSize) { + pool = new Buffer(poolSize); + pool.used = 0; +} + +function ReadStream(sftp, path, options) { + if (!(this instanceof ReadStream)) + return new ReadStream(sftp, path, options); + + var self = this; + + if (options === undefined) + options = {}; + else if (typeof options === 'string') + options = { encoding: options }; + else if (options === null || typeof options !== 'object') + throw new TypeError('"options" argument must be a string or an object'); + else + options = Object.create(options); + + // a little bit bigger buffer and water marks by default + if (options.highWaterMark === undefined) + options.highWaterMark = 64 * 1024; + + ReadableStream.call(this, options); + + this.path = path; + this.handle = options.handle === undefined ? null : options.handle; + this.flags = options.flags === undefined ? 'r' : options.flags; + this.mode = options.mode === undefined ? 438/*0666*/ : options.mode; + + this.start = options.start === undefined ? undefined : options.start; + this.end = options.end === undefined ? undefined : options.end; + this.autoClose = options.autoClose === undefined ? 
true : options.autoClose; + this.pos = 0; + this.sftp = sftp; + + if (this.start !== undefined) { + if (typeof this.start !== 'number') + throw new TypeError('start must be a Number'); + if (this.end === undefined) + this.end = Infinity; + else if (typeof this.end !== 'number') + throw new TypeError('end must be a Number'); + + if (this.start > this.end) + throw new Error('start must be <= end'); + else if (this.start < 0) + throw new Error('start must be >= zero'); + + this.pos = this.start; + } + + this.on('end', function() { + if (self.autoClose) { + self.destroy(); + } + }); + + if (!Buffer.isBuffer(this.handle)) + this.open(); +} +inherits(ReadStream, ReadableStream); + +ReadStream.prototype.open = function() { + var self = this; + this.sftp.open(this.path, this.flags, this.mode, function(er, handle) { + if (er) { + self.emit('error', er); + this.destroyed = this.closed = true; + self.emit('close'); + return; + } + + self.handle = handle; + self.emit('open', handle); + // start the flow of data. + self.read(); + }); +}; + +ReadStream.prototype._read = function(n) { + if (!Buffer.isBuffer(this.handle)) { + return this.once('open', function() { + this._read(n); + }); + } + + if (this.destroyed) + return; + + if (!pool || pool.length - pool.used < kMinPoolSpace) { + // discard the old pool. + pool = null; + allocNewPool(this._readableState.highWaterMark); + } + + // Grab another reference to the pool in the case that while we're + // in the thread pool another read() finishes up the pool, and + // allocates a new one. + var thisPool = pool; + var toRead = Math.min(pool.length - pool.used, n); + var start = pool.used; + + if (this.end !== undefined) + toRead = Math.min(this.end - this.pos + 1, toRead); + + // already read everything we were supposed to read! + // treat as EOF. + if (toRead <= 0) + return this.push(null); + + // the actual read. 
+ var self = this; + this.sftp.readData(this.handle, pool, pool.used, toRead, this.pos, onread); + + // move the pool positions, and internal position for reading. + this.pos += toRead; + pool.used += toRead; + + function onread(er, bytesRead) { + if (er) { + if (self.autoClose) + self.destroy(); + self.emit('error', er); + } else { + var b = null; + if (bytesRead > 0) + b = thisPool.slice(start, start + bytesRead); + + self.push(b); + } + } +}; + +ReadStream.prototype.destroy = function() { + if (this.destroyed) + return; + this.destroyed = true; + if (Buffer.isBuffer(this.handle)) + this.close(); +}; + + +ReadStream.prototype.close = function(cb) { + var self = this; + if (cb) + this.once('close', cb); + if (this.closed || !Buffer.isBuffer(this.handle)) { + if (!Buffer.isBuffer(this.handle)) { + this.once('open', close); + return; + } + return process.nextTick(this.emit.bind(this, 'close')); + } + this.closed = true; + close(); + + function close(handle) { + self.sftp.close(handle || self.handle, function(er) { + if (er) + self.emit('error', er); + else + self.emit('close'); + }); + self.handle = null; + } +}; + + +function WriteStream(sftp, path, options) { + if (!(this instanceof WriteStream)) + return new WriteStream(sftp, path, options); + + if (options === undefined) + options = {}; + else if (typeof options === 'string') + options = { encoding: options }; + else if (options === null || typeof options !== 'object') + throw new TypeError('"options" argument must be a string or an object'); + else + options = Object.create(options); + + WritableStream.call(this, options); + + this.path = path; + this.handle = options.handle === undefined ? null : options.handle; + this.flags = options.flags === undefined ? 'w' : options.flags; + this.mode = options.mode === undefined ? 438/*0666*/ : options.mode; + + this.start = options.start === undefined ? undefined : options.start; + this.autoClose = options.autoClose === undefined ? 
true : options.autoClose; + this.pos = 0; + this.bytesWritten = 0; + this.sftp = sftp; + + if (this.start !== undefined) { + if (typeof this.start !== 'number') + throw new TypeError('start must be a Number'); + if (this.start < 0) + throw new Error('start must be >= zero'); + + this.pos = this.start; + } + + if (options.encoding) + this.setDefaultEncoding(options.encoding); + + if (!Buffer.isBuffer(this.handle)) + this.open(); + + // dispose on finish. + this.once('finish', function onclose() { + if (this.autoClose) + this.close(); + }); +} +inherits(WriteStream, WritableStream); + +WriteStream.prototype.open = function() { + var self = this; + this.sftp.open(this.path, this.flags, this.mode, function(er, handle) { + if (er) { + self.emit('error', er); + if (self.autoClose) { + self.destroyed = self.closed = true; + self.emit('close'); + } + return; + } + + self.handle = handle; + + self.sftp.fchmod(handle, self.mode, function tryAgain(err) { + if (err) { + // Try chmod() for sftp servers that may not support fchmod() for + // whatever reason + self.sftp.chmod(self.path, self.mode, function(err_) { + tryAgain(); + }); + return; + } + + // SFTPv3 requires absolute offsets, no matter the open flag used + if (self.flags[0] === 'a') { + self.sftp.fstat(handle, function tryStat(err, st) { + if (err) { + // Try stat() for sftp servers that may not support fstat() for + // whatever reason + self.sftp.stat(self.path, function(err_, st_) { + if (err_) { + self.destroy(); + self.emit('error', err); + return; + } + tryStat(null, st_); + }); + return; + } + + self.pos = st.size; + self.emit('open', handle); + }); + return; + } + self.emit('open', handle); + }); + }); +}; + +WriteStream.prototype._write = function(data, encoding, cb) { + if (!Buffer.isBuffer(data)) + return this.emit('error', new Error('Invalid data')); + + if (!Buffer.isBuffer(this.handle)) { + return this.once('open', function() { + this._write(data, encoding, cb); + }); + } + + var self = this; + 
this.sftp.writeData(this.handle, + data, + 0, + data.length, + this.pos, + function(er, bytes) { + if (er) { + if (self.autoClose) + self.destroy(); + return cb(er); + } + self.bytesWritten += bytes; + cb(); + }); + + this.pos += data.length; +}; + +WriteStream.prototype._writev = function(data, cb) { + if (!Buffer.isBuffer(this.handle)) { + return this.once('open', function() { + this._writev(data, cb); + }); + } + + var sftp = this.sftp; + var handle = this.handle; + var writesLeft = data.length; + var self = this; + + for (var i = 0; i < data.length; ++i) { + var chunk = data[i].chunk; + + sftp.writeData(handle, chunk, 0, chunk.length, this.pos, onwrite); + this.pos += chunk.length; + } + + function onwrite(er, bytes) { + if (er) { + self.destroy(); + return cb(er); + } + self.bytesWritten += bytes; + if (--writesLeft === 0) + cb(); + } +}; + +WriteStream.prototype.destroy = ReadStream.prototype.destroy; +WriteStream.prototype.close = ReadStream.prototype.close; + +// There is no shutdown() for files. 
+WriteStream.prototype.destroySoon = WriteStream.prototype.end; + + +module.exports = SFTPStream; + diff --git a/reverse_engineering/node_modules/ssh2-streams/lib/ssh.js b/reverse_engineering/node_modules/ssh2-streams/lib/ssh.js new file mode 100644 index 0000000..d70dc74 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/lib/ssh.js @@ -0,0 +1,5403 @@ +// TODO: * Automatic re-key every (configurable) n bytes or length of time +// - RFC suggests every 1GB of transmitted data or 1 hour, whichever +// comes sooner +// * Filter control codes from strings +// (as per http://tools.ietf.org/html/rfc4251#section-9.2) + +var crypto = require('crypto'); +var zlib = require('zlib'); +var TransformStream = require('stream').Transform; +var inherits = require('util').inherits; +var inspect = require('util').inspect; +var BUFFER_MAX_LEN = require('buffer').kMaxLength; + +var StreamSearch = require('streamsearch'); + +var consts = require('./constants'); +var utils = require('./utils'); +var isStreamCipher = utils.isStreamCipher; +var iv_inc = utils.iv_inc; +var readString = utils.readString; +var readInt = utils.readInt; +var DSASigBERToBare = utils.DSASigBERToBare; +var DSASigBareToBER = utils.DSASigBareToBER; +var ECDSASigSSHToASN1 = utils.ECDSASigSSHToASN1; +var ECDSASigASN1ToSSH = utils.ECDSASigASN1ToSSH; +var RSAKeySSHToASN1 = utils.RSAKeySSHToASN1; +var DSAKeySSHToASN1 = utils.DSAKeySSHToASN1; +var ECDSAKeySSHToASN1 = utils.ECDSAKeySSHToASN1; + +var MESSAGE = consts.MESSAGE; +var DYNAMIC_KEXDH_MESSAGE = consts.DYNAMIC_KEXDH_MESSAGE; +var KEXDH_MESSAGE = consts.KEXDH_MESSAGE; +var ALGORITHMS = consts.ALGORITHMS; +var DISCONNECT_REASON = consts.DISCONNECT_REASON; +var CHANNEL_OPEN_FAILURE = consts.CHANNEL_OPEN_FAILURE; +var SSH_TO_OPENSSL = consts.SSH_TO_OPENSSL; +var TERMINAL_MODE = consts.TERMINAL_MODE; +var SIGNALS = consts.SIGNALS; +var BUGS = consts.BUGS; +var BUGGY_IMPLS = consts.BUGGY_IMPLS; +var BUGGY_IMPLS_LEN = BUGGY_IMPLS.length; +var MODULE_VER = 
require('../package.json').version; +var I = 0; +var IN_INIT = I++; +var IN_GREETING = I++; +var IN_HEADER = I++; +var IN_PACKETBEFORE = I++; +var IN_PACKET = I++; +var IN_PACKETDATA = I++; +var IN_PACKETDATAVERIFY = I++; +var IN_PACKETDATAAFTER = I++; +var OUT_INIT = I++; +var OUT_READY = I++; +var OUT_REKEYING = I++; +var MAX_SEQNO = 4294967295; +var MAX_PACKET_SIZE = 35000; +var MAX_PACKETS_REKEYING = 50; +var EXP_TYPE_HEADER = 0; +var EXP_TYPE_LF = 1; +var EXP_TYPE_BYTES = 2; // Waits until n bytes have been seen +var Z_PARTIAL_FLUSH = zlib.Z_PARTIAL_FLUSH; +var ZLIB_OPTS = { flush: Z_PARTIAL_FLUSH }; + +var RE_KEX_HASH = /-(.+)$/; +var RE_GEX = /^gex-/; +var RE_NULL = /\x00/g; +var RE_GCM = /^aes\d+-gcm/i; + +var IDENT_PREFIX_BUFFER = new Buffer('SSH-'); +var EMPTY_BUFFER = new Buffer(0); +var PING_PACKET = new Buffer([ + MESSAGE.GLOBAL_REQUEST, + // "keepalive@openssh.com" + 0, 0, 0, 21, + 107, 101, 101, 112, 97, 108, 105, 118, 101, 64, 111, 112, 101, 110, 115, + 115, 104, 46, 99, 111, 109, + // Request a reply + 1 +]); +var NEWKEYS_PACKET = new Buffer([MESSAGE.NEWKEYS]); +var USERAUTH_SUCCESS_PACKET = new Buffer([MESSAGE.USERAUTH_SUCCESS]); +var REQUEST_SUCCESS_PACKET = new Buffer([MESSAGE.REQUEST_SUCCESS]); +var REQUEST_FAILURE_PACKET = new Buffer([MESSAGE.REQUEST_FAILURE]); +var NO_TERMINAL_MODES_BUFFER = new Buffer([TERMINAL_MODE.TTY_OP_END]); +var KEXDH_GEX_REQ_PACKET = new Buffer([ + MESSAGE.KEXDH_GEX_REQUEST, + // Minimal size in bits of an acceptable group + 0, 0, 4, 0, // 1024, modp2 + // Preferred size in bits of the group the server will send + 0, 0, 16, 0, // 4096, modp16 + // Maximal size in bits of an acceptable group + 0, 0, 32, 0 // 8192, modp18 +]); + +function DEBUG_NOOP(msg) {} + +function SSH2Stream(cfg) { + if (typeof cfg !== 'object' || cfg === null) + cfg = {}; + + TransformStream.call(this, { + highWaterMark: (typeof cfg.highWaterMark === 'number' + ? 
cfg.highWaterMark + : 32 * 1024) + }); + + this._needContinue = false; + this.bytesSent = this.bytesReceived = 0; + this.debug = (typeof cfg.debug === 'function' ? cfg.debug : DEBUG_NOOP); + this.server = (cfg.server === true); + this.maxPacketSize = (typeof cfg.maxPacketSize === 'number' + ? cfg.maxPacketSize + : MAX_PACKET_SIZE); + // Bitmap that indicates any bugs the remote side has. This is determined + // by the reported software version. + this.remoteBugs = 0; + + if (this.server) { + // TODO: Remove when we support group exchange for server implementation + this.remoteBugs = BUGS.BAD_DHGEX; + } + + var self = this; + + var hostKeys = cfg.hostKeys; + if (this.server && (typeof hostKeys !== 'object' || hostKeys === null)) + throw new Error('hostKeys must be an object keyed on host key type'); + + this.config = { + // Server + hostKeys: hostKeys, // All keys supported by server + + // Client/Server + ident: 'SSH-2.0-' + + (cfg.ident + || ('ssh2js' + MODULE_VER + (this.server ? 'srv' : ''))), + algorithms: { + kex: ALGORITHMS.KEX, + kexBuf: ALGORITHMS.KEX_BUF, + serverHostKey: ALGORITHMS.SERVER_HOST_KEY, + serverHostKeyBuf: ALGORITHMS.SERVER_HOST_KEY_BUF, + cipher: ALGORITHMS.CIPHER, + cipherBuf: ALGORITHMS.CIPHER_BUF, + hmac: ALGORITHMS.HMAC, + hmacBuf: ALGORITHMS.HMAC_BUF, + compress: ALGORITHMS.COMPRESS, + compressBuf: ALGORITHMS.COMPRESS_BUF + } + }; + // RFC 4253 states the identification string must not contain NULL + this.config.ident.replace(RE_NULL, ''); + + if (this.config.ident.length + 2 /* Account for "\r\n" */ > 255) + throw new Error('ident too long'); + + if (typeof cfg.algorithms === 'object' && cfg.algorithms !== null) { + var algos = cfg.algorithms; + if (Array.isArray(algos.kex) && algos.kex.length > 0) { + this.config.algorithms.kex = algos.kex; + if (!Buffer.isBuffer(algos.kexBuf)) + algos.kexBuf = new Buffer(algos.kex.join(','), 'ascii'); + this.config.algorithms.kexBuf = algos.kexBuf; + } + if (Array.isArray(algos.serverHostKey) && 
algos.serverHostKey.length > 0) { + this.config.algorithms.serverHostKey = algos.serverHostKey; + if (!Buffer.isBuffer(algos.serverHostKeyBuf)) { + algos.serverHostKeyBuf = new Buffer(algos.serverHostKey.join(','), + 'ascii'); + } + this.config.algorithms.serverHostKeyBuf = algos.serverHostKeyBuf; + } + if (Array.isArray(algos.cipher) && algos.cipher.length > 0) { + this.config.algorithms.cipher = algos.cipher; + if (!Buffer.isBuffer(algos.cipherBuf)) + algos.cipherBuf = new Buffer(algos.cipher.join(','), 'ascii'); + this.config.algorithms.cipherBuf = algos.cipherBuf; + } + if (Array.isArray(algos.hmac) && algos.hmac.length > 0) { + this.config.algorithms.hmac = algos.hmac; + if (!Buffer.isBuffer(algos.hmacBuf)) + algos.hmacBuf = new Buffer(algos.hmac.join(','), 'ascii'); + this.config.algorithms.hmacBuf = algos.hmacBuf; + } + if (Array.isArray(algos.compress) && algos.compress.length > 0) { + this.config.algorithms.compress = algos.compress; + if (!Buffer.isBuffer(algos.compressBuf)) + algos.compressBuf = new Buffer(algos.compress.join(','), 'ascii'); + this.config.algorithms.compressBuf = algos.compressBuf; + } + } + + this.reset(true); + + // Common events + this.on('end', function() { + // Let GC collect any Buffers we were previously storing + self._state = undefined; + self.reset(); + self._state.incoming.hmac.bufCompute = undefined; + self._state.outgoing.bufSeqno = undefined; + }); + this.on('DISCONNECT', function(reason, code, desc, lang) { + onDISCONNECT(self, reason, code, desc, lang); + }); + this.on('KEXINIT', function(init, firstFollows) { + onKEXINIT(self, init, firstFollows); + }); + this.on('NEWKEYS', function() { onNEWKEYS(self); }); + + if (this.server) { + // Server-specific events + this.on('KEXDH_INIT', function(e) { onKEXDH_INIT(self, e); }); + } else { + // Client-specific events + this.on('KEXDH_REPLY', function(info) { onKEXDH_REPLY(self, info); }) + .on('KEXDH_GEX_GROUP', + function(prime, gen) { onKEXDH_GEX_GROUP(self, prime, gen); }); + 
} + + if (this.server) { + // Greeting displayed before the ssh identification string is sent, this is + // usually ignored by most clients + if (typeof cfg.greeting === 'string' && cfg.greeting.length) { + if (cfg.greeting.slice(-2) === '\r\n') + this.push(cfg.greeting); + else + this.push(cfg.greeting + '\r\n'); + } + // Banner shown after the handshake completes, but before user + // authentication begins + if (typeof cfg.banner === 'string' && cfg.banner.length) { + if (cfg.banner.slice(-2) === '\r\n') + this.banner = cfg.banner; + else + this.banner = cfg.banner + '\r\n'; + } + } + this.debug('DEBUG: Local ident: ' + inspect(this.config.ident)); + this.push(this.config.ident + '\r\n'); + + this._state.incoming.expectedPacket = 'KEXINIT'; +} +inherits(SSH2Stream, TransformStream); + +SSH2Stream.prototype.__read = TransformStream.prototype._read; +SSH2Stream.prototype._read = function(n) { + if (this._needContinue) { + this._needContinue = false; + this.emit('continue'); + } + return this.__read(n); +}; +SSH2Stream.prototype.__push = TransformStream.prototype.push; +SSH2Stream.prototype.push = function(chunk, encoding) { + var ret = this.__push(chunk, encoding); + this._needContinue = (ret === false); + return ret; +}; + +SSH2Stream.prototype._cleanup = function(callback) { + this.reset(); + this.debug('DEBUG: Parser: Malformed packet'); + callback && callback(new Error('Malformed packet')); +}; + +SSH2Stream.prototype._transform = function(chunk, encoding, callback, decomp) { + var skipDecrypt = false; + var doDecryptGCM = false; + var state = this._state; + var instate = state.incoming; + var outstate = state.outgoing; + var expect = instate.expect; + var decrypt = instate.decrypt; + var decompress = instate.decompress; + var chlen = chunk.length; + var chleft = 0; + var debug = this.debug; + var self = this; + var i = 0; + var p = i; + var buffer; + var buf; + var r; + + this.bytesReceived += chlen; + + while (true) { + if (expect.type !== undefined) { + if 
(i >= chlen) + break; + if (expect.type === EXP_TYPE_BYTES) { + chleft = (chlen - i); + var pktLeft = (expect.buf.length - expect.ptr); + if (pktLeft <= chleft) { + chunk.copy(expect.buf, expect.ptr, i, i + pktLeft); + i += pktLeft; + buffer = expect.buf; + expect.buf = undefined; + expect.ptr = 0; + expect.type = undefined; + } else { + chunk.copy(expect.buf, expect.ptr, i); + expect.ptr += chleft; + i += chleft; + } + continue; + } else if (expect.type === EXP_TYPE_HEADER) { + i += instate.search.push(chunk); + if (expect.type !== undefined) + continue; + } else if (expect.type === EXP_TYPE_LF) { + if (++expect.ptr + 4 /* Account for "SSH-" */ > 255) { + this.reset(); + debug('DEBUG: Parser: Identification string exceeded 255 characters'); + return callback(new Error('Max identification string size exceeded')); + } + if (chunk[i] === 0x0A) { + expect.type = undefined; + if (p < i) { + if (expect.buf === undefined) + expect.buf = chunk.toString('ascii', p, i); + else + expect.buf += chunk.toString('ascii', p, i); + } + buffer = expect.buf; + expect.buf = undefined; + ++i; + } else { + if (++i === chlen && p < i) { + if (expect.buf === undefined) + expect.buf = chunk.toString('ascii', p, i); + else + expect.buf += chunk.toString('ascii', p, i); + } + continue; + } + } + } + + if (instate.status === IN_INIT) { + if (this.server) { + // Retrieve what should be the start of the protocol version exchange + if (!buffer) { + debug('DEBUG: Parser: IN_INIT (waiting for identification begin)'); + expectData(this, EXP_TYPE_BYTES, 4); + } else { + if (buffer[0] === 0x53 // S + && buffer[1] === 0x53 // S + && buffer[2] === 0x48 // H + && buffer[3] === 0x2D) { // - + instate.status = IN_GREETING; + debug('DEBUG: Parser: IN_INIT (waiting for rest of identification)'); + } else { + this.reset(); + debug('DEBUG: Parser: Bad identification start'); + return callback(new Error('Bad identification start')); + } + } + } else { + debug('DEBUG: Parser: IN_INIT'); + // Retrieve any bytes 
that may come before the protocol version exchange + var ss = instate.search = new StreamSearch(IDENT_PREFIX_BUFFER); + ss.on('info', function onInfo(matched, data, start, end) { + if (data) { + if (instate.greeting === undefined) + instate.greeting = data.toString('binary', start, end); + else + instate.greeting += data.toString('binary', start, end); + } + if (matched) { + expect.type = undefined; + instate.search.removeListener('info', onInfo); + } + }); + ss.maxMatches = 1; + expectData(this, EXP_TYPE_HEADER); + instate.status = IN_GREETING; + } + } else if (instate.status === IN_GREETING) { + debug('DEBUG: Parser: IN_GREETING'); + instate.search = undefined; + // Retrieve the identification bytes after the "SSH-" header + p = i; + expectData(this, EXP_TYPE_LF); + instate.status = IN_HEADER; + } else if (instate.status === IN_HEADER) { + debug('DEBUG: Parser: IN_HEADER'); + if (buffer.charCodeAt(buffer.length - 1) === 13) + buffer = buffer.slice(0, -1); + var idxDash = buffer.indexOf('-'); + var idxSpace = buffer.indexOf(' '); + var header = { + // RFC says greeting SHOULD be utf8 + greeting: instate.greeting, + identRaw: 'SSH-' + buffer, + versions: { + protocol: buffer.substr(0, idxDash), + software: (idxSpace === -1 + ? buffer.substring(idxDash + 1) + : buffer.substring(idxDash + 1, idxSpace)) + }, + comments: (idxSpace > -1 ? buffer.substring(idxSpace + 1) : undefined) + }; + instate.greeting = undefined; + + if (header.versions.protocol !== '1.99' + && header.versions.protocol !== '2.0') { + this.reset(); + debug('DEBUG: Parser: protocol version not supported: ' + + header.versions.protocol); + return callback(new Error('Protocol version not supported')); + } else + this.emit('header', header); + + if (instate.status === IN_INIT) { + // We reset from an event handler, possibly due to an unsupported SSH + // protocol version? 
+ return; + } + + var identRaw = header.identRaw; + var software = header.versions.software; + this.debug('DEBUG: Remote ident: ' + inspect(identRaw)); + for (var j = 0, rule; j < BUGGY_IMPLS_LEN; ++j) { + rule = BUGGY_IMPLS[j]; + if (typeof rule[0] === 'string') { + if (software === rule[0]) + this.remoteBugs |= rule[1]; + } else if (rule[0].test(software)) + this.remoteBugs |= rule[1]; + } + instate.identRaw = identRaw; + // Adjust bytesReceived first otherwise it will have an incorrectly larger + // total when we call back into this function after completing KEXINIT + this.bytesReceived -= (chlen - i); + KEXINIT(this, function() { + if (i === chlen) + callback(); + else + self._transform(chunk.slice(i), encoding, callback); + }); + instate.status = IN_PACKETBEFORE; + return; + } else if (instate.status === IN_PACKETBEFORE) { + debug('DEBUG: Parser: IN_PACKETBEFORE (expecting ' + decrypt.size + ')'); + // Wait for the right number of bytes so we can determine the incoming + // packet length + expectData(this, EXP_TYPE_BYTES, decrypt.size, decrypt.buf); + instate.status = IN_PACKET; + } else if (instate.status === IN_PACKET) { + debug('DEBUG: Parser: IN_PACKET'); + doDecryptGCM = (decrypt.instance && decrypt.isGCM); + if (decrypt.instance && !decrypt.isGCM) + buffer = decryptData(this, buffer); + + r = readInt(buffer, 0, this, callback); + if (r === false) + return; + var macSize = (instate.hmac.size || 0); + var fullPacketLen = r + 4 + macSize; + var maxPayloadLen = this.maxPacketSize; + if (decompress.instance) { + // Account for compressed payloads + // This formula is taken from dropbear which derives it from zlib's + // documentation. Explanation from dropbear: + /* For exact details see http://www.zlib.net/zlib_tech.html + * 5 bytes per 16kB block, plus 6 bytes for the stream. + * We might allocate 5 unnecessary bytes here if it's an + * exact multiple. 
*/ + maxPayloadLen += (((this.maxPacketSize / 16384) + 1) * 5 + 6); + } + if (r > maxPayloadLen + // TODO: Change 16 to "MAX(16, decrypt.size)" when/if SSH2 adopts + // 512-bit ciphers + || fullPacketLen < (16 + macSize) + || ((r + (doDecryptGCM ? 0 : 4)) % decrypt.size) !== 0) { + this.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR); + debug('DEBUG: Parser: Bad packet length (' + fullPacketLen + ')'); + return callback(new Error('Bad packet length')); + } + + instate.pktLen = r; + var remainLen = instate.pktLen + 4 - decrypt.size; + if (doDecryptGCM) { + decrypt.instance.setAAD(buffer.slice(0, 4)); + debug('DEBUG: Parser: pktLen:' + + instate.pktLen + + ',remainLen:' + + remainLen); + } else { + instate.padLen = buffer[4]; + debug('DEBUG: Parser: pktLen:' + + instate.pktLen + + ',padLen:' + + instate.padLen + + ',remainLen:' + + remainLen); + } + if (remainLen > 0) { + if (doDecryptGCM) + instate.pktExtra = buffer.slice(4); + else + instate.pktExtra = buffer.slice(5); + // Grab the rest of the packet + expectData(this, EXP_TYPE_BYTES, remainLen); + instate.status = IN_PACKETDATA; + } else if (remainLen < 0) + instate.status = IN_PACKETBEFORE; + else { + // Entire message fit into one block + skipDecrypt = true; + instate.status = IN_PACKETDATA; + continue; + } + } else if (instate.status === IN_PACKETDATA) { + debug('DEBUG: Parser: IN_PACKETDATA'); + doDecryptGCM = (decrypt.instance && decrypt.isGCM); + if (decrypt.instance && !skipDecrypt && !doDecryptGCM) + buffer = decryptData(this, buffer); + else if (skipDecrypt) + skipDecrypt = false; + var padStart = instate.pktLen - instate.padLen - 1; + // TODO: Allocate a Buffer once that is slightly larger than maxPacketSize + // (to accommodate for packet length field and MAC) and re-use that + // instead + if (instate.pktExtra) { + buf = new Buffer(instate.pktExtra.length + buffer.length); + instate.pktExtra.copy(buf); + buffer.copy(buf, instate.pktExtra.length); + instate.payload = buf.slice(0, padStart); + } else { + 
// Entire message fit into one block + if (doDecryptGCM) + buf = buffer.slice(4); + else + buf = buffer.slice(5); + instate.payload = buffer.slice(5, 5 + padStart); + } + if (instate.hmac.size !== undefined) { + // Wait for hmac hash + debug('DEBUG: Parser: HMAC size:' + instate.hmac.size); + expectData(this, EXP_TYPE_BYTES, instate.hmac.size, instate.hmac.buf); + instate.status = IN_PACKETDATAVERIFY; + instate.packet = buf; + } else + instate.status = IN_PACKETDATAAFTER; + instate.pktExtra = undefined; + buf = undefined; + } else if (instate.status === IN_PACKETDATAVERIFY) { + debug('DEBUG: Parser: IN_PACKETDATAVERIFY'); + // Verify packet data integrity + if (hmacVerify(this, buffer)) { + debug('DEBUG: Parser: IN_PACKETDATAVERIFY (Valid HMAC)'); + instate.status = IN_PACKETDATAAFTER; + instate.packet = undefined; + } else { + this.reset(); + debug('DEBUG: Parser: IN_PACKETDATAVERIFY (Invalid HMAC)'); + return callback(new Error('Invalid HMAC')); + } + } else if (instate.status === IN_PACKETDATAAFTER) { + if (decompress.instance) { + if (!decomp) { + debug('DEBUG: Parser: Decompressing'); + decompress.instance.write(instate.payload); + var decompBuf = []; + var decompBufLen = 0; + decompress.instance.on('readable', function() { + var buf; + while (buf = this.read()) { + decompBuf.push(buf); + decompBufLen += buf.length; + } + }).flush(Z_PARTIAL_FLUSH, function() { + decompress.instance.removeAllListeners('readable'); + if (decompBuf.length === 1) + instate.payload = decompBuf[0]; + else + instate.payload = Buffer.concat(decompBuf, decompBufLen); + decompBuf = null; + var nextSlice; + if (i === chlen) + nextSlice = EMPTY_BUFFER; // Avoid slicing a zero-length buffer + else + nextSlice = chunk.slice(i); + self._transform(nextSlice, encoding, callback, true); + }); + return; + } else { + // Make sure we reset this after this first time in the loop, + // otherwise we could end up trying to interpret as-is another + // compressed packet that is within the same chunk + 
decomp = false; + } + } + + this.emit('packet'); + + var ptype = instate.payload[0]; + + if (debug !== DEBUG_NOOP) { + var msgPacket = 'DEBUG: Parser: IN_PACKETDATAAFTER, packet: '; + var kexdh = state.kexdh; + var authMethod = state.authsQueue[0]; + var msgPktType = null; + + if (outstate.status === OUT_REKEYING + && !(ptype <= 4 || (ptype >= 20 && ptype <= 49))) + msgPacket += '(enqueued) '; + + if (ptype === MESSAGE.KEXDH_INIT) { + if (kexdh === 'group') + msgPktType = 'KEXDH_INIT'; + else if (kexdh[0] === 'e') + msgPktType = 'KEXECDH_INIT'; + else + msgPktType = 'KEXDH_GEX_REQUEST'; + } else if (ptype === MESSAGE.KEXDH_REPLY) { + if (kexdh === 'group') + msgPktType = 'KEXDH_REPLY'; + else if (kexdh[0] === 'e') + msgPktType = 'KEXECDH_REPLY'; + else + msgPktType = 'KEXDH_GEX_GROUP'; + } else if (ptype === MESSAGE.KEXDH_GEX_GROUP) + msgPktType = 'KEXDH_GEX_GROUP'; + else if (ptype === MESSAGE.KEXDH_GEX_REPLY) + msgPktType = 'KEXDH_GEX_REPLY'; + else if (ptype === 60) { + if (authMethod === 'password') + msgPktType = 'USERAUTH_PASSWD_CHANGEREQ'; + else if (authMethod === 'keyboard-interactive') + msgPktType = 'USERAUTH_INFO_REQUEST'; + else if (authMethod === 'publickey') + msgPktType = 'USERAUTH_PK_OK'; + else + msgPktType = 'UNKNOWN PACKET 60'; + } else if (ptype === 61) { + if (authMethod === 'keyboard-interactive') + msgPktType = 'USERAUTH_INFO_RESPONSE'; + else + msgPktType = 'UNKNOWN PACKET 61'; + } + + if (msgPktType === null) + msgPktType = MESSAGE[ptype]; + + // Don't write debug output for messages we custom make in parsePacket() + if (ptype !== MESSAGE.CHANNEL_OPEN + && ptype !== MESSAGE.CHANNEL_REQUEST + && ptype !== MESSAGE.CHANNEL_SUCCESS + && ptype !== MESSAGE.CHANNEL_FAILURE + && ptype !== MESSAGE.CHANNEL_EOF + && ptype !== MESSAGE.CHANNEL_CLOSE + && ptype !== MESSAGE.CHANNEL_DATA + && ptype !== MESSAGE.CHANNEL_EXTENDED_DATA + && ptype !== MESSAGE.CHANNEL_WINDOW_ADJUST + && ptype !== MESSAGE.DISCONNECT + && ptype !== MESSAGE.USERAUTH_REQUEST + && 
ptype !== MESSAGE.GLOBAL_REQUEST) + debug(msgPacket + msgPktType); + } + + // Only parse packet if we are not re-keying or the packet is not a + // transport layer packet needed for re-keying + if (outstate.status === OUT_READY + || ptype <= 4 + || (ptype >= 20 && ptype <= 49)) { + if (parsePacket(this, callback) === false) + return; + + if (instate.status === IN_INIT) { + // We were reset due to some error/disagreement ? + return; + } + } else if (outstate.status === OUT_REKEYING) { + if (instate.rekeyQueue.length === MAX_PACKETS_REKEYING) { + debug('DEBUG: Parser: Max incoming re-key queue length reached'); + this.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR); + return callback( + new Error('Incoming re-key queue length limit reached') + ); + } + + // Make sure to record the sequence number in case we need it later on + // when we drain the queue (e.g. unknown packet) + var seqno = instate.seqno; + if (++instate.seqno > MAX_SEQNO) + instate.seqno = 0; + + instate.rekeyQueue.push(seqno, instate.payload); + } + + instate.status = IN_PACKETBEFORE; + instate.payload = undefined; + } + if (buffer !== undefined) + buffer = undefined; + } + + callback(); +}; + +SSH2Stream.prototype.reset = function(noend) { + if (this._state) { + var state = this._state; + state.incoming.status = IN_INIT; + state.outgoing.status = OUT_INIT; + } else { + this._state = { + authsQueue: [], + hostkeyFormat: undefined, + kex: undefined, + kexdh: undefined, + + incoming: { + status: IN_INIT, + expectedPacket: undefined, + search: undefined, + greeting: undefined, + seqno: 0, + pktLen: undefined, + padLen: undefined, + pktExtra: undefined, + payload: undefined, + packet: undefined, + kexinit: undefined, + identRaw: undefined, + rekeyQueue: [], + ignoreNext: false, + + expect: { + amount: undefined, + type: undefined, + ptr: 0, + buf: undefined + }, + + decrypt: { + instance: false, + size: 8, + isGCM: false, + iv: undefined, // GCM + key: undefined, // GCM + buf: undefined, + type: undefined + 
}, + + hmac: { + size: undefined, + key: undefined, + buf: undefined, + bufCompute: new Buffer(9), + type: false + }, + + decompress: { + instance: false, + type: false + } + }, + + outgoing: { + status: OUT_INIT, + seqno: 0, + bufSeqno: new Buffer(4), + rekeyQueue: [], + kexinit: undefined, + kexsecret: undefined, + pubkey: undefined, + exchangeHash: undefined, + sessionId: undefined, + sentNEWKEYS: false, + + encrypt: { + instance: false, + size: 8, + isGCM: false, + iv: undefined, // GCM + key: undefined, // GCM + type: undefined + }, + + hmac: { + size: undefined, + key: undefined, + buf: undefined, + type: false + }, + + compress: { + instance: false, + type: false + } + } + }; + } + if (!noend) { + if (this.readable) + this.push(null); + } +}; + +// Common methods +// Global +SSH2Stream.prototype.disconnect = function(reason) { + /* + byte SSH_MSG_DISCONNECT + uint32 reason code + string description in ISO-10646 UTF-8 encoding + string language tag + */ + var buf = new Buffer(1 + 4 + 4 + 4); + + buf.fill(0); + buf[0] = MESSAGE.DISCONNECT; + + if (DISCONNECT_REASON[reason] === undefined) + reason = DISCONNECT_REASON.BY_APPLICATION; + buf.writeUInt32BE(reason, 1, true); + + this.debug('DEBUG: Outgoing: Writing DISCONNECT (' + + DISCONNECT_REASON[reason] + + ')'); + send(this, buf); + this.reset(); + + return false; +}; +SSH2Stream.prototype.ping = function() { + this.debug('DEBUG: Outgoing: Writing ping (GLOBAL_REQUEST: keepalive@openssh.com)'); + return send(this, PING_PACKET); +}; +SSH2Stream.prototype.rekey = function() { + var status = this._state.outgoing.status; + if (status === OUT_REKEYING) + throw new Error('A re-key is already in progress'); + else if (status !== OUT_READY) + throw new Error('Cannot re-key yet'); + + this.debug('DEBUG: Outgoing: Starting re-key'); + return KEXINIT(this); +}; + +// 'ssh-connection' service-specific +SSH2Stream.prototype.requestSuccess = function(data) { + var buf; + if (Buffer.isBuffer(data)) { + buf = new Buffer(1 + 
data.length); + + buf[0] = MESSAGE.REQUEST_SUCCESS; + + data.copy(buf, 1); + } else + buf = REQUEST_SUCCESS_PACKET; + + this.debug('DEBUG: Outgoing: Writing REQUEST_SUCCESS'); + return send(this, buf); +}; +SSH2Stream.prototype.requestFailure = function() { + this.debug('DEBUG: Outgoing: Writing REQUEST_FAILURE'); + return send(this, REQUEST_FAILURE_PACKET); +}; +SSH2Stream.prototype.channelSuccess = function(chan) { + // Does not consume window space + var buf = new Buffer(1 + 4); + + buf[0] = MESSAGE.CHANNEL_SUCCESS; + + buf.writeUInt32BE(chan, 1, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_SUCCESS (' + chan + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelFailure = function(chan) { + // Does not consume window space + var buf = new Buffer(1 + 4); + + buf[0] = MESSAGE.CHANNEL_FAILURE; + + buf.writeUInt32BE(chan, 1, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_FAILURE (' + chan + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelEOF = function(chan) { + // Does not consume window space + var buf = new Buffer(1 + 4); + + buf[0] = MESSAGE.CHANNEL_EOF; + + buf.writeUInt32BE(chan, 1, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_EOF (' + chan + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelClose = function(chan) { + // Does not consume window space + var buf = new Buffer(1 + 4); + + buf[0] = MESSAGE.CHANNEL_CLOSE; + + buf.writeUInt32BE(chan, 1, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_CLOSE (' + chan + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelWindowAdjust = function(chan, amount) { + // Does not consume window space + var buf = new Buffer(1 + 4 + 4); + + buf[0] = MESSAGE.CHANNEL_WINDOW_ADJUST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(amount, 5, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_WINDOW_ADJUST (' + + chan + + ', ' + + amount + + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelData = 
function(chan, data) { + var dataIsBuffer = Buffer.isBuffer(data); + var dataLen = (dataIsBuffer ? data.length : Buffer.byteLength(data)); + var buf = new Buffer(1 + 4 + 4 + dataLen); + + buf[0] = MESSAGE.CHANNEL_DATA; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(dataLen, 5, true); + if (dataIsBuffer) + data.copy(buf, 9); + else + buf.write(data, 9, dataLen, 'utf8'); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_DATA (' + chan + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelExtData = function(chan, data, type) { + var dataIsBuffer = Buffer.isBuffer(data); + var dataLen = (dataIsBuffer ? data.length : Buffer.byteLength(data)); + var buf = new Buffer(1 + 4 + 4 + 4 + dataLen); + + buf[0] = MESSAGE.CHANNEL_EXTENDED_DATA; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(type, 5, true); + + buf.writeUInt32BE(dataLen, 9, true); + if (dataIsBuffer) + data.copy(buf, 13); + else + buf.write(data, 13, dataLen, 'utf8'); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_EXTENDED_DATA (' + chan + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelOpenConfirm = function(remoteChan, localChan, + initWindow, maxPacket) { + var buf = new Buffer(1 + 4 + 4 + 4 + 4); + + buf[0] = MESSAGE.CHANNEL_OPEN_CONFIRMATION; + + buf.writeUInt32BE(remoteChan, 1, true); + + buf.writeUInt32BE(localChan, 5, true); + + buf.writeUInt32BE(initWindow, 9, true); + + buf.writeUInt32BE(maxPacket, 13, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN_CONFIRMATION (r:' + + remoteChan + + ', l:' + + localChan + + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.channelOpenFail = function(remoteChan, reason, desc, + lang) { + if (typeof desc !== 'string') + desc = ''; + if (typeof lang !== 'string') + lang = ''; + + var descLen = Buffer.byteLength(desc); + var langLen = Buffer.byteLength(lang); + var p = 9; + var buf = new Buffer(1 + 4 + 4 + 4 + descLen + 4 + langLen); + + buf[0] = MESSAGE.CHANNEL_OPEN_FAILURE; + + 
buf.writeUInt32BE(remoteChan, 1, true); + + buf.writeUInt32BE(reason, 5, true); + + buf.writeUInt32BE(descLen, p, true); + p += 4; + if (descLen) { + buf.write(desc, p, descLen, 'utf8'); + p += descLen; + } + + buf.writeUInt32BE(langLen, p, true); + if (langLen) + buf.write(lang, p += 4, langLen, 'ascii'); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN_FAILURE (' + + remoteChan + + ')'); + return send(this, buf); +}; + +// Client-specific methods +// Global +SSH2Stream.prototype.service = function(svcName) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var svcNameLen = Buffer.byteLength(svcName); + var buf = new Buffer(1 + 4 + svcNameLen); + + buf[0] = MESSAGE.SERVICE_REQUEST; + + buf.writeUInt32BE(svcNameLen, 1, true); + buf.write(svcName, 5, svcNameLen, 'ascii'); + + this.debug('DEBUG: Outgoing: Writing SERVICE_REQUEST (' + svcName + ')'); + return send(this, buf); +}; +// 'ssh-connection' service-specific +SSH2Stream.prototype.tcpipForward = function(bindAddr, bindPort, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var addrlen = Buffer.byteLength(bindAddr); + var buf = new Buffer(1 + 4 + 13 + 1 + 4 + addrlen + 4); + + buf[0] = MESSAGE.GLOBAL_REQUEST; + + buf.writeUInt32BE(13, 1, true); + buf.write('tcpip-forward', 5, 13, 'ascii'); + + buf[18] = (wantReply === undefined || wantReply === true ? 
1 : 0); + + buf.writeUInt32BE(addrlen, 19, true); + buf.write(bindAddr, 23, addrlen, 'ascii'); + + buf.writeUInt32BE(bindPort, 23 + addrlen, true); + + this.debug('DEBUG: Outgoing: Writing GLOBAL_REQUEST (tcpip-forward)'); + return send(this, buf); +}; +SSH2Stream.prototype.cancelTcpipForward = function(bindAddr, bindPort, + wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var addrlen = Buffer.byteLength(bindAddr); + var buf = new Buffer(1 + 4 + 20 + 1 + 4 + addrlen + 4); + + buf[0] = MESSAGE.GLOBAL_REQUEST; + + buf.writeUInt32BE(20, 1, true); + buf.write('cancel-tcpip-forward', 5, 20, 'ascii'); + + buf[25] = (wantReply === undefined || wantReply === true ? 1 : 0); + + buf.writeUInt32BE(addrlen, 26, true); + buf.write(bindAddr, 30, addrlen, 'ascii'); + + buf.writeUInt32BE(bindPort, 30 + addrlen, true); + + this.debug('DEBUG: Outgoing: Writing GLOBAL_REQUEST (cancel-tcpip-forward)'); + return send(this, buf); +}; +SSH2Stream.prototype.openssh_streamLocalForward = function(socketPath, + wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var pathlen = Buffer.byteLength(socketPath); + var buf = new Buffer(1 + 4 + 31 + 1 + 4 + pathlen); + + buf[0] = MESSAGE.GLOBAL_REQUEST; + + buf.writeUInt32BE(31, 1, true); + buf.write('streamlocal-forward@openssh.com', 5, 31, 'ascii'); + + buf[36] = (wantReply === undefined || wantReply === true ? 
1 : 0); + + buf.writeUInt32BE(pathlen, 37, true); + buf.write(socketPath, 41, pathlen, 'utf8'); + + this.debug('DEBUG: Outgoing: Writing GLOBAL_REQUEST (streamlocal-forward@openssh.com)'); + return send(this, buf); +}; +SSH2Stream.prototype.openssh_cancelStreamLocalForward = function(socketPath, + wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var pathlen = Buffer.byteLength(socketPath); + var buf = new Buffer(1 + 4 + 38 + 1 + 4 + pathlen); + + buf[0] = MESSAGE.GLOBAL_REQUEST; + + buf.writeUInt32BE(38, 1, true); + buf.write('cancel-streamlocal-forward@openssh.com', 5, 38, 'ascii'); + + buf[43] = (wantReply === undefined || wantReply === true ? 1 : 0); + + buf.writeUInt32BE(pathlen, 44, true); + buf.write(socketPath, 48, pathlen, 'utf8'); + + this.debug('DEBUG: Outgoing: Writing GLOBAL_REQUEST (cancel-streamlocal-forward@openssh.com)'); + return send(this, buf); +}; +SSH2Stream.prototype.directTcpip = function(chan, initWindow, maxPacket, cfg) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var srclen = Buffer.byteLength(cfg.srcIP); + var dstlen = Buffer.byteLength(cfg.dstIP); + var p = 29; + var buf = new Buffer(1 + 4 + 12 + 4 + 4 + 4 + 4 + srclen + 4 + 4 + dstlen + + 4); + + buf[0] = MESSAGE.CHANNEL_OPEN; + + buf.writeUInt32BE(12, 1, true); + buf.write('direct-tcpip', 5, 12, 'ascii'); + + buf.writeUInt32BE(chan, 17, true); + + buf.writeUInt32BE(initWindow, 21, true); + + buf.writeUInt32BE(maxPacket, 25, true); + + buf.writeUInt32BE(dstlen, p, true); + buf.write(cfg.dstIP, p += 4, dstlen, 'ascii'); + + buf.writeUInt32BE(cfg.dstPort, p += dstlen, true); + + buf.writeUInt32BE(srclen, p += 4, true); + buf.write(cfg.srcIP, p += 4, srclen, 'ascii'); + + buf.writeUInt32BE(cfg.srcPort, p += srclen, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN (' + + chan + + ', direct-tcpip)'); + return send(this, buf); +}; +SSH2Stream.prototype.openssh_directStreamLocal = 
function(chan, initWindow, + maxPacket, cfg) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var pathlen = Buffer.byteLength(cfg.socketPath); + var p = 47; + var buf = new Buffer(1 + 4 + 30 + 4 + 4 + 4 + 4 + pathlen + 4 + 4); + + buf[0] = MESSAGE.CHANNEL_OPEN; + + buf.writeUInt32BE(30, 1, true); + buf.write('direct-streamlocal@openssh.com', 5, 30, 'ascii'); + + buf.writeUInt32BE(chan, 35, true); + + buf.writeUInt32BE(initWindow, 39, true); + + buf.writeUInt32BE(maxPacket, 43, true); + + buf.writeUInt32BE(pathlen, p, true); + buf.write(cfg.socketPath, p += 4, pathlen, 'utf8'); + + // reserved fields (string and uint32) + buf.fill(0, buf.length - 8); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN (' + + chan + + ', direct-streamlocal@openssh.com)'); + return send(this, buf); +}; +SSH2Stream.prototype.openssh_noMoreSessions = function(wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var buf = new Buffer(1 + 4 + 28 + 1); + + buf[0] = MESSAGE.GLOBAL_REQUEST; + + buf.writeUInt32BE(28, 1, true); + buf.write('no-more-sessions@openssh.com', 5, 28, 'ascii'); + + buf[33] = (wantReply === undefined || wantReply === true ? 
1 : 0); + + this.debug('DEBUG: Outgoing: Writing GLOBAL_REQUEST (no-more-sessions@openssh.com)'); + return send(this, buf); +}; +SSH2Stream.prototype.session = function(chan, initWindow, maxPacket) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var buf = new Buffer(1 + 4 + 7 + 4 + 4 + 4); + + buf[0] = MESSAGE.CHANNEL_OPEN; + + buf.writeUInt32BE(7, 1, true); + buf.write('session', 5, 7, 'ascii'); + + buf.writeUInt32BE(chan, 12, true); + + buf.writeUInt32BE(initWindow, 16, true); + + buf.writeUInt32BE(maxPacket, 20, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN (' + + chan + + ', session)'); + return send(this, buf); +}; +SSH2Stream.prototype.windowChange = function(chan, rows, cols, height, width) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var buf = new Buffer(1 + 4 + 4 + 13 + 1 + 4 + 4 + 4 + 4); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(13, 5, true); + buf.write('window-change', 9, 13, 'ascii'); + + buf[22] = 0; + + buf.writeUInt32BE(cols, 23, true); + + buf.writeUInt32BE(rows, 27, true); + + buf.writeUInt32BE(width, 31, true); + + buf.writeUInt32BE(height, 35, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', window-change)'); + return send(this, buf); +}; +SSH2Stream.prototype.pty = function(chan, rows, cols, height, + width, term, modes, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + if (!term || !term.length) + term = 'vt100'; + if (modes + && !Buffer.isBuffer(modes) + && !Array.isArray(modes) + && typeof modes === 'object') + modes = modesToBytes(modes); + if (!modes || !modes.length) + modes = NO_TERMINAL_MODES_BUFFER; + + var termLen = term.length; + var modesLen = modes.length; + var p = 21; + var buf = new Buffer(1 
+ 4 + 4 + 7 + 1 + 4 + termLen + 4 + 4 + 4 + 4 + 4 + + modesLen); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(7, 5, true); + buf.write('pty-req', 9, 7, 'ascii'); + + buf[16] = (wantReply === undefined || wantReply === true ? 1 : 0); + + buf.writeUInt32BE(termLen, 17, true); + buf.write(term, 21, termLen, 'utf8'); + + buf.writeUInt32BE(cols, p += termLen, true); + + buf.writeUInt32BE(rows, p += 4, true); + + buf.writeUInt32BE(width, p += 4, true); + + buf.writeUInt32BE(height, p += 4, true); + + buf.writeUInt32BE(modesLen, p += 4, true); + p += 4; + if (Array.isArray(modes)) { + for (var i = 0; i < modesLen; ++i) + buf[p++] = modes[i]; + } else if (Buffer.isBuffer(modes)) { + modes.copy(buf, p); + } + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', pty-req)'); + return send(this, buf); +}; +SSH2Stream.prototype.shell = function(chan, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var buf = new Buffer(1 + 4 + 4 + 5 + 1); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(5, 5, true); + buf.write('shell', 9, 5, 'ascii'); + + buf[14] = (wantReply === undefined || wantReply === true ? 1 : 0); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', shell)'); + return send(this, buf); +}; +SSH2Stream.prototype.exec = function(chan, cmd, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var cmdlen = (Buffer.isBuffer(cmd) ? cmd.length : Buffer.byteLength(cmd)); + var buf = new Buffer(1 + 4 + 4 + 4 + 1 + 4 + cmdlen); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(4, 5, true); + buf.write('exec', 9, 4, 'ascii'); + + buf[13] = (wantReply === undefined || wantReply === true ? 
1 : 0); + + buf.writeUInt32BE(cmdlen, 14, true); + if (Buffer.isBuffer(cmd)) + cmd.copy(buf, 18); + else + buf.write(cmd, 18, cmdlen, 'utf8'); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', exec)'); + return send(this, buf); +}; +SSH2Stream.prototype.signal = function(chan, signal) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + signal = signal.toUpperCase(); + if (signal.slice(0, 3) === 'SIG') + signal = signal.substring(3); + + if (SIGNALS.indexOf(signal) === -1) + throw new Error('Invalid signal: ' + signal); + + var signalLen = signal.length; + var buf = new Buffer(1 + 4 + 4 + 6 + 1 + 4 + signalLen); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(6, 5, true); + buf.write('signal', 9, 6, 'ascii'); + + buf[15] = 0; + + buf.writeUInt32BE(signalLen, 16, true); + buf.write(signal, 20, signalLen, 'ascii'); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', signal)'); + return send(this, buf); +}; +SSH2Stream.prototype.env = function(chan, key, val, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var keyLen = Buffer.byteLength(key); + var valLen = (Buffer.isBuffer(val) ? val.length : Buffer.byteLength(val)); + var buf = new Buffer(1 + 4 + 4 + 3 + 1 + 4 + keyLen + 4 + valLen); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(3, 5, true); + buf.write('env', 9, 3, 'ascii'); + + buf[12] = (wantReply === undefined || wantReply === true ? 
1 : 0); + + buf.writeUInt32BE(keyLen, 13, true); + buf.write(key, 17, keyLen, 'ascii'); + + buf.writeUInt32BE(valLen, 17 + keyLen, true); + if (Buffer.isBuffer(val)) + val.copy(buf, 17 + keyLen + 4); + else + buf.write(val, 17 + keyLen + 4, valLen, 'utf8'); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', env)'); + return send(this, buf); +}; +SSH2Stream.prototype.x11Forward = function(chan, cfg, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var protolen = Buffer.byteLength(cfg.protocol); + var cookielen = Buffer.byteLength(cfg.cookie); + var buf = new Buffer(1 + 4 + 4 + 7 + 1 + 1 + 4 + protolen + 4 + cookielen + + 4); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(7, 5, true); + buf.write('x11-req', 9, 7, 'ascii'); + + buf[16] = (wantReply === undefined || wantReply === true ? 1 : 0); + + buf[17] = (cfg.single ? 1 : 0); + + buf.writeUInt32BE(protolen, 18, true); + var bp = 22; + if (Buffer.isBuffer(cfg.protocol)) + cfg.protocol.copy(buf, bp); + else + buf.write(cfg.protocol, bp, protolen, 'utf8'); + bp += protolen; + + buf.writeUInt32BE(cookielen, bp, true); + bp += 4; + if (Buffer.isBuffer(cfg.cookie)) + cfg.cookie.copy(buf, bp); + else + buf.write(cfg.cookie, bp, cookielen, 'utf8'); + bp += cookielen; + + buf.writeUInt32BE((cfg.screen || 0), bp, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', x11-req)'); + return send(this, buf); +}; +SSH2Stream.prototype.subsystem = function(chan, name, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var nameLen = Buffer.byteLength(name); + var buf = new Buffer(1 + 4 + 4 + 9 + 1 + 4 + nameLen); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(9, 5, true); + buf.write('subsystem', 9, 9, 'ascii'); + 
+ buf[18] = (wantReply === undefined || wantReply === true ? 1 : 0); + + buf.writeUInt32BE(nameLen, 19, true); + buf.write(name, 23, nameLen, 'ascii'); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', subsystem: ' + + name + + ')'); + return send(this, buf); +}; +SSH2Stream.prototype.openssh_agentForward = function(chan, wantReply) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + // Does not consume window space + var buf = new Buffer(1 + 4 + 4 + 26 + 1); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(26, 5, true); + buf.write('auth-agent-req@openssh.com', 9, 26, 'ascii'); + + buf[35] = (wantReply === undefined || wantReply === true ? 1 : 0); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', auth-agent-req@openssh.com)'); + return send(this, buf); +}; +// 'ssh-userauth' service-specific +SSH2Stream.prototype.authPassword = function(username, password) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var userLen = Buffer.byteLength(username); + var passLen = Buffer.byteLength(password); + var p = 0; + var buf = new Buffer(1 + + 4 + userLen + + 4 + 14 // "ssh-connection" + + 4 + 8 // "password" + + 1 + + 4 + passLen); + + buf[p] = MESSAGE.USERAUTH_REQUEST; + + buf.writeUInt32BE(userLen, ++p, true); + buf.write(username, p += 4, userLen, 'utf8'); + + buf.writeUInt32BE(14, p += userLen, true); + buf.write('ssh-connection', p += 4, 14, 'ascii'); + + buf.writeUInt32BE(8, p += 14, true); + buf.write('password', p += 4, 8, 'ascii'); + + buf[p += 8] = 0; + + buf.writeUInt32BE(passLen, ++p, true); + buf.write(password, p += 4, passLen, 'utf8'); + + this._state.authsQueue.push('password'); + this.debug('DEBUG: Outgoing: Writing USERAUTH_REQUEST (password)'); + return send(this, buf); +}; +SSH2Stream.prototype.authPK = function(username, pubKey, cbSign) { + if (this.server) + throw new 
Error('Client-only method called in server mode'); + + var self = this; + var outstate = this._state.outgoing; + var pubKeyFullType; + + if (pubKey.public) { + pubKeyFullType = pubKey.fulltype; + pubKey = pubKey.public; + } else { + pubKeyFullType = pubKey.toString('ascii', + 4, + 4 + pubKey.readUInt32BE(0, true)); + } + + var userLen = Buffer.byteLength(username); + var algoLen = Buffer.byteLength(pubKeyFullType); + var pubKeyLen = pubKey.length; + var sesLen = outstate.sessionId.length; + var p = 0; + var buf = new Buffer((cbSign ? 4 + sesLen : 0) + + 1 + + 4 + userLen + + 4 + 14 // "ssh-connection" + + 4 + 9 // "publickey" + + 1 + + 4 + algoLen + + 4 + pubKeyLen + ); + + if (cbSign) { + buf.writeUInt32BE(sesLen, p, true); + outstate.sessionId.copy(buf, p += 4); + buf[p += sesLen] = MESSAGE.USERAUTH_REQUEST; + } else + buf[p] = MESSAGE.USERAUTH_REQUEST; + + buf.writeUInt32BE(userLen, ++p, true); + buf.write(username, p += 4, userLen, 'utf8'); + + buf.writeUInt32BE(14, p += userLen, true); + buf.write('ssh-connection', p += 4, 14, 'ascii'); + + buf.writeUInt32BE(9, p += 14, true); + buf.write('publickey', p += 4, 9, 'ascii'); + + buf[p += 9] = (cbSign ? 
1 : 0); + + buf.writeUInt32BE(algoLen, ++p, true); + buf.write(pubKeyFullType, p += 4, algoLen, 'ascii'); + + buf.writeUInt32BE(pubKeyLen, p += algoLen, true); + pubKey.copy(buf, p += 4); + + if (!cbSign) { + this._state.authsQueue.push('publickey'); + this.debug('DEBUG: Outgoing: Writing USERAUTH_REQUEST (publickey -- check)'); + return send(this, buf); + } + + cbSign(buf, function(signature) { + if (pubKeyFullType === 'ssh-dss') { + signature = DSASigBERToBare(signature); + } else if (pubKeyFullType !== 'ssh-rsa') { + // ECDSA + signature = ECDSASigASN1ToSSH(signature); + } + + var sigLen = signature.length; + var sigbuf = new Buffer(1 + + 4 + userLen + + 4 + 14 // "ssh-connection" + + 4 + 9 // "publickey" + + 1 + + 4 + algoLen + + 4 + pubKeyLen + + 4 // 4 + algoLen + 4 + sigLen + + 4 + algoLen + + 4 + sigLen); + + p = 0; + + sigbuf[p] = MESSAGE.USERAUTH_REQUEST; + + sigbuf.writeUInt32BE(userLen, ++p, true); + sigbuf.write(username, p += 4, userLen, 'utf8'); + + sigbuf.writeUInt32BE(14, p += userLen, true); + sigbuf.write('ssh-connection', p += 4, 14, 'ascii'); + + sigbuf.writeUInt32BE(9, p += 14, true); + sigbuf.write('publickey', p += 4, 9, 'ascii'); + + sigbuf[p += 9] = 1; + + sigbuf.writeUInt32BE(algoLen, ++p, true); + sigbuf.write(pubKeyFullType, p += 4, algoLen, 'ascii'); + + sigbuf.writeUInt32BE(pubKeyLen, p += algoLen, true); + pubKey.copy(sigbuf, p += 4); + sigbuf.writeUInt32BE(4 + algoLen + 4 + sigLen, p += pubKeyLen, true); + sigbuf.writeUInt32BE(algoLen, p += 4, true); + sigbuf.write(pubKeyFullType, p += 4, algoLen, 'ascii'); + sigbuf.writeUInt32BE(sigLen, p += algoLen, true); + signature.copy(sigbuf, p += 4); + + // Servers shouldn't send packet type 60 in response to signed publickey + // attempts, but if they do, interpret as type 60. 
+ self._state.authsQueue.push('publickey'); + self.debug('DEBUG: Outgoing: Writing USERAUTH_REQUEST (publickey)'); + return send(self, sigbuf); + }); + return true; +}; +SSH2Stream.prototype.authHostbased = function(username, pubKey, hostname, + userlocal, cbSign) { + // TODO: Make DRY by sharing similar code with authPK() + + if (this.server) + throw new Error('Client-only method called in server mode'); + + var self = this; + var outstate = this._state.outgoing; + var pubKeyFullType; + + if (pubKey.public) { + pubKeyFullType = pubKey.fulltype; + pubKey = pubKey.public; + } else { + pubKeyFullType = pubKey.toString('ascii', + 4, + 4 + pubKey.readUInt32BE(0, true)); + } + + var userLen = Buffer.byteLength(username); + var algoLen = Buffer.byteLength(pubKeyFullType); + var pubKeyLen = pubKey.length; + var sesLen = outstate.sessionId.length; + var hostnameLen = Buffer.byteLength(hostname); + var userlocalLen = Buffer.byteLength(userlocal); + var p = 0; + var buf = new Buffer(4 + sesLen + + 1 + + 4 + userLen + + 4 + 14 // "ssh-connection" + + 4 + 9 // "hostbased" + + 4 + algoLen + + 4 + pubKeyLen + + 4 + hostnameLen + + 4 + userlocalLen + ); + + buf.writeUInt32BE(sesLen, p, true); + outstate.sessionId.copy(buf, p += 4); + + buf[p += sesLen] = MESSAGE.USERAUTH_REQUEST; + + buf.writeUInt32BE(userLen, ++p, true); + buf.write(username, p += 4, userLen, 'utf8'); + + buf.writeUInt32BE(14, p += userLen, true); + buf.write('ssh-connection', p += 4, 14, 'ascii'); + + buf.writeUInt32BE(9, p += 14, true); + buf.write('hostbased', p += 4, 9, 'ascii'); + + buf.writeUInt32BE(algoLen, p += 9, true); + buf.write(pubKeyFullType, p += 4, algoLen, 'ascii'); + + buf.writeUInt32BE(pubKeyLen, p += algoLen, true); + pubKey.copy(buf, p += 4); + + buf.writeUInt32BE(hostnameLen, p += pubKeyLen, true); + buf.write(hostname, p += 4, hostnameLen, 'ascii'); + + buf.writeUInt32BE(userlocalLen, p += hostnameLen, true); + buf.write(userlocal, p += 4, userlocalLen, 'utf8'); + + cbSign(buf, 
function(signature) { + if (pubKeyFullType === 'ssh-dss') { + signature = DSASigBERToBare(signature); + } else if (pubKeyFullType !== 'ssh-rsa') { + // ECDSA + signature = ECDSASigASN1ToSSH(signature); + } + var sigLen = signature.length; + var sigbuf = new Buffer((buf.length - sesLen) + sigLen); + + buf.copy(sigbuf, 0, 4 + sesLen); + sigbuf.writeUInt32BE(sigLen, sigbuf.length - sigLen - 4, true); + signature.copy(sigbuf, sigbuf.length - sigLen); + + self._state.authsQueue.push('hostbased'); + self.debug('DEBUG: Outgoing: Writing USERAUTH_REQUEST (hostbased)'); + return send(self, sigbuf); + }); + return true; +}; +SSH2Stream.prototype.authKeyboard = function(username) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var userLen = Buffer.byteLength(username); + var p = 0; + var buf = new Buffer(1 + + 4 + userLen + + 4 + 14 // "ssh-connection" + + 4 + 20 // "keyboard-interactive" + + 4 // no language set + + 4 // no submethods + ); + + buf[p] = MESSAGE.USERAUTH_REQUEST; + + buf.writeUInt32BE(userLen, ++p, true); + buf.write(username, p += 4, userLen, 'utf8'); + + buf.writeUInt32BE(14, p += userLen, true); + buf.write('ssh-connection', p += 4, 14, 'ascii'); + + buf.writeUInt32BE(20, p += 14, true); + buf.write('keyboard-interactive', p += 4, 20, 'ascii'); + + buf.writeUInt32BE(0, p += 20, true); + + buf.writeUInt32BE(0, p += 4, true); + + this._state.authsQueue.push('keyboard-interactive'); + this.debug('DEBUG: Outgoing: Writing USERAUTH_REQUEST (keyboard-interactive)'); + return send(this, buf); +}; +SSH2Stream.prototype.authNone = function(username) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var userLen = Buffer.byteLength(username); + var p = 0; + var buf = new Buffer(1 + + 4 + userLen + + 4 + 14 // "ssh-connection" + + 4 + 4 // "none" + ); + + buf[p] = MESSAGE.USERAUTH_REQUEST; + + buf.writeUInt32BE(userLen, ++p, true); + buf.write(username, p += 4, userLen, 'utf8'); + + 
buf.writeUInt32BE(14, p += userLen, true); + buf.write('ssh-connection', p += 4, 14, 'ascii'); + + buf.writeUInt32BE(4, p += 14, true); + buf.write('none', p += 4, 4, 'ascii'); + + this._state.authsQueue.push('none'); + this.debug('DEBUG: Outgoing: Writing USERAUTH_REQUEST (none)'); + return send(this, buf); +}; +SSH2Stream.prototype.authInfoRes = function(responses) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + var responsesLen = 0; + var p = 0; + var resLen; + var len; + var i; + + if (responses) { + for (i = 0, len = responses.length; i < len; ++i) + responsesLen += 4 + Buffer.byteLength(responses[i]); + } + var buf = new Buffer(1 + 4 + responsesLen); + + buf[p++] = MESSAGE.USERAUTH_INFO_RESPONSE; + + buf.writeUInt32BE(responses ? responses.length : 0, p, true); + if (responses) { + p += 4; + for (i = 0, len = responses.length; i < len; ++i) { + resLen = Buffer.byteLength(responses[i]); + buf.writeUInt32BE(resLen, p, true); + p += 4; + if (resLen) { + buf.write(responses[i], p, resLen, 'utf8'); + p += resLen; + } + } + } + + this.debug('DEBUG: Outgoing: Writing USERAUTH_INFO_RESPONSE'); + return send(this, buf); +}; + +// Server-specific methods +// Global +SSH2Stream.prototype.serviceAccept = function(svcName) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var svcNameLen = svcName.length; + var buf = new Buffer(1 + 4 + svcNameLen); + + buf[0] = MESSAGE.SERVICE_ACCEPT; + + buf.writeUInt32BE(svcNameLen, 1, true); + buf.write(svcName, 5, svcNameLen, 'ascii'); + + this.debug('DEBUG: Outgoing: Writing SERVICE_ACCEPT (' + svcName + ')'); + send(this, buf); + + if (this.server && this.banner && svcName === 'ssh-userauth') { + /* + byte SSH_MSG_USERAUTH_BANNER + string message in ISO-10646 UTF-8 encoding + string language tag + */ + var bannerLen = Buffer.byteLength(this.banner); + var packetLen = 1 + 4 + bannerLen + 4; + if (packetLen > BUFFER_MAX_LEN) { + bannerLen -= 1 + 4 + 4; + 
packetLen -= 1 + 4 + 4; + } + var packet = new Buffer(packetLen); + packet[0] = MESSAGE.USERAUTH_BANNER; + packet.writeUInt32BE(bannerLen, 1, true); + packet.write(this.banner, 5, bannerLen, 'utf8'); + packet.fill(0, packetLen - 4); // Empty language tag + this.debug('DEBUG: Outgoing: Writing USERAUTH_BANNER'); + send(this, packet); + this.banner = undefined; // Prevent banner from being displayed again + } +}; +// 'ssh-connection' service-specific +SSH2Stream.prototype.forwardedTcpip = function(chan, initWindow, maxPacket, + cfg) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var boundAddrLen = Buffer.byteLength(cfg.boundAddr); + var remoteAddrLen = Buffer.byteLength(cfg.remoteAddr); + var p = 36 + boundAddrLen; + var buf = new Buffer(1 + 4 + 15 + 4 + 4 + 4 + 4 + boundAddrLen + 4 + 4 + + remoteAddrLen + 4); + + buf[0] = MESSAGE.CHANNEL_OPEN; + + buf.writeUInt32BE(15, 1, true); + buf.write('forwarded-tcpip', 5, 15, 'ascii'); + + buf.writeUInt32BE(chan, 20, true); + + buf.writeUInt32BE(initWindow, 24, true); + + buf.writeUInt32BE(maxPacket, 28, true); + + buf.writeUInt32BE(boundAddrLen, 32, true); + buf.write(cfg.boundAddr, 36, boundAddrLen, 'ascii'); + + buf.writeUInt32BE(cfg.boundPort, p, true); + + buf.writeUInt32BE(remoteAddrLen, p += 4, true); + buf.write(cfg.remoteAddr, p += 4, remoteAddrLen, 'ascii'); + + buf.writeUInt32BE(cfg.remotePort, p += remoteAddrLen, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN (' + + chan + + ', forwarded-tcpip)'); + return send(this, buf); +}; +SSH2Stream.prototype.x11 = function(chan, initWindow, maxPacket, cfg) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var addrLen = Buffer.byteLength(cfg.originAddr); + var p = 24 + addrLen; + var buf = new Buffer(1 + 4 + 3 + 4 + 4 + 4 + 4 + addrLen + 4); + + buf[0] = MESSAGE.CHANNEL_OPEN; + + buf.writeUInt32BE(3, 1, true); + buf.write('x11', 5, 3, 'ascii'); + + buf.writeUInt32BE(chan, 8, true); 
+ + buf.writeUInt32BE(initWindow, 12, true); + + buf.writeUInt32BE(maxPacket, 16, true); + + buf.writeUInt32BE(addrLen, 20, true); + buf.write(cfg.originAddr, 24, addrLen, 'ascii'); + + buf.writeUInt32BE(cfg.originPort, p, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN (' + + chan + + ', x11)'); + return send(this, buf); +}; +SSH2Stream.prototype.openssh_forwardedStreamLocal = function(chan, initWindow, + maxPacket, cfg) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var pathlen = Buffer.byteLength(cfg.socketPath); + var buf = new Buffer(1 + 4 + 33 + 4 + 4 + 4 + 4 + pathlen + 4); + + buf[0] = MESSAGE.CHANNEL_OPEN; + + buf.writeUInt32BE(33, 1, true); + buf.write('forwarded-streamlocal@openssh.com', 5, 33, 'ascii'); + + buf.writeUInt32BE(chan, 38, true); + + buf.writeUInt32BE(initWindow, 42, true); + + buf.writeUInt32BE(maxPacket, 46, true); + + buf.writeUInt32BE(pathlen, 50, true); + buf.write(cfg.socketPath, 54, pathlen, 'utf8'); + + buf.writeUInt32BE(0, 54 + pathlen, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_OPEN (' + + chan + + ', forwarded-streamlocal@openssh.com)'); + return send(this, buf); +}; +SSH2Stream.prototype.exitStatus = function(chan, status) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + // Does not consume window space + var buf = new Buffer(1 + 4 + 4 + 11 + 1 + 4); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(11, 5, true); + buf.write('exit-status', 9, 11, 'ascii'); + + buf[20] = 0; + + buf.writeUInt32BE(status, 21, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', exit-status)'); + return send(this, buf); +}; +SSH2Stream.prototype.exitSignal = function(chan, name, coreDumped, msg) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + // Does not consume window space + var nameLen = Buffer.byteLength(name); + var msgLen 
= (msg ? Buffer.byteLength(msg) : 0); + var p = 25 + nameLen; + var buf = new Buffer(1 + 4 + 4 + 11 + 1 + 4 + nameLen + 1 + 4 + msgLen + 4); + + buf[0] = MESSAGE.CHANNEL_REQUEST; + + buf.writeUInt32BE(chan, 1, true); + + buf.writeUInt32BE(11, 5, true); + buf.write('exit-signal', 9, 11, 'ascii'); + + buf[20] = 0; + + buf.writeUInt32BE(nameLen, 21, true); + buf.write(name, 25, nameLen, 'utf8'); + + buf[p++] = (coreDumped ? 1 : 0); + + buf.writeUInt32BE(msgLen, p, true); + p += 4; + if (msgLen) { + buf.write(msg, p, msgLen, 'utf8'); + p += msgLen; + } + + buf.writeUInt32BE(0, p, true); + + this.debug('DEBUG: Outgoing: Writing CHANNEL_REQUEST (' + + chan + + ', exit-signal)'); + return send(this, buf); +}; +// 'ssh-userauth' service-specific +SSH2Stream.prototype.authFailure = function(authMethods, isPartial) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var authsQueue = this._state.authsQueue; + if (!authsQueue.length) + throw new Error('No auth in progress'); + + var methods; + + if (typeof authMethods === 'boolean') { + isPartial = authMethods; + authMethods = undefined; + } + + if (authMethods) { + methods = []; + for (var i = 0, len = authMethods.length; i < len; ++i) { + if (authMethods[i].toLowerCase() === 'none') + continue; + methods.push(authMethods[i]); + } + methods = methods.join(','); + } else + methods = ''; + + var methodsLen = methods.length; + var buf = new Buffer(1 + 4 + methodsLen + 1); + + buf[0] = MESSAGE.USERAUTH_FAILURE; + + buf.writeUInt32BE(methodsLen, 1, true); + buf.write(methods, 5, methodsLen, 'ascii'); + + buf[5 + methodsLen] = (isPartial === true ? 
1 : 0); + + this._state.authsQueue.shift(); + this.debug('DEBUG: Outgoing: Writing USERAUTH_FAILURE'); + return send(this, buf); +}; +SSH2Stream.prototype.authSuccess = function() { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var authsQueue = this._state.authsQueue; + if (!authsQueue.length) + throw new Error('No auth in progress'); + + this._state.authsQueue.shift(); + this.debug('DEBUG: Outgoing: Writing USERAUTH_SUCCESS'); + return send(this, USERAUTH_SUCCESS_PACKET); +}; +SSH2Stream.prototype.authPKOK = function(keyAlgo, key) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var authsQueue = this._state.authsQueue; + if (!authsQueue.length || authsQueue[0] !== 'publickey') + throw new Error('"publickey" auth not in progress'); + + var keyAlgoLen = keyAlgo.length; + var keyLen = key.length; + var buf = new Buffer(1 + 4 + keyAlgoLen + 4 + keyLen); + + buf[0] = MESSAGE.USERAUTH_PK_OK; + + buf.writeUInt32BE(keyAlgoLen, 1, true); + buf.write(keyAlgo, 5, keyAlgoLen, 'ascii'); + + buf.writeUInt32BE(keyLen, 5 + keyAlgoLen, true); + key.copy(buf, 5 + keyAlgoLen + 4); + + this._state.authsQueue.shift(); + this.debug('DEBUG: Outgoing: Writing USERAUTH_PK_OK'); + return send(this, buf); +}; +SSH2Stream.prototype.authPasswdChg = function(prompt, lang) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var promptLen = Buffer.byteLength(prompt); + var langLen = lang ? 
lang.length : 0; + var p = 0; + var buf = new Buffer(1 + 4 + promptLen + 4 + langLen); + + buf[p] = MESSAGE.USERAUTH_PASSWD_CHANGEREQ; + + buf.writeUInt32BE(promptLen, ++p, true); + buf.write(prompt, p += 4, promptLen, 'utf8'); + + buf.writeUInt32BE(langLen, p += promptLen, true); + if (langLen) + buf.write(lang, p += 4, langLen, 'ascii'); + + this.debug('DEBUG: Outgoing: Writing USERAUTH_PASSWD_CHANGEREQ'); + return send(this, buf); +}; +SSH2Stream.prototype.authInfoReq = function(name, instructions, prompts) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + var promptsLen = 0; + var nameLen = name ? Buffer.byteLength(name) : 0; + var instrLen = instructions ? Buffer.byteLength(instructions) : 0; + var p = 0; + var promptLen; + var prompt; + var len; + var i; + + for (i = 0, len = prompts.length; i < len; ++i) + promptsLen += 4 + Buffer.byteLength(prompts[i].prompt) + 1; + var buf = new Buffer(1 + 4 + nameLen + 4 + instrLen + 4 + 4 + promptsLen); + + buf[p++] = MESSAGE.USERAUTH_INFO_REQUEST; + + buf.writeUInt32BE(nameLen, p, true); + p += 4; + if (name) { + buf.write(name, p, nameLen, 'utf8'); + p += nameLen; + } + + buf.writeUInt32BE(instrLen, p, true); + p += 4; + if (instructions) { + buf.write(instructions, p, instrLen, 'utf8'); + p += instrLen; + } + + buf.writeUInt32BE(0, p, true); + p += 4; + + buf.writeUInt32BE(prompts.length, p, true); + p += 4; + for (i = 0, len = prompts.length; i < len; ++i) { + prompt = prompts[i]; + promptLen = Buffer.byteLength(prompt.prompt); + buf.writeUInt32BE(promptLen, p, true); + p += 4; + if (promptLen) { + buf.write(prompt.prompt, p, promptLen, 'utf8'); + p += promptLen; + } + buf[p++] = (prompt.echo ? 
1 : 0); + } + + this.debug('DEBUG: Outgoing: Writing USERAUTH_INFO_REQUEST'); + return send(this, buf); +}; + +// Shared incoming/parser functions +function onDISCONNECT(self, reason, code, desc, lang) { // Client/Server + if (code !== DISCONNECT_REASON.BY_APPLICATION) { + var err = new Error(desc || reason); + err.code = code; + self.emit('error', err); + } + self.reset(); +} + +function onKEXINIT(self, init, firstFollows) { // Client/Server + var state = self._state; + var outstate = state.outgoing; + + if (outstate.status === OUT_READY) { + self.debug('DEBUG: Received re-key request'); + outstate.status = OUT_REKEYING; + outstate.kexinit = undefined; + KEXINIT(self, check); + } else + check(); + + function check() { + if (check_KEXINIT(self, init, firstFollows) === true) { + var isGEX = RE_GEX.test(state.kexdh); + if (!self.server) { + if (isGEX) + KEXDH_GEX_REQ(self); + else + KEXDH_INIT(self); + } else { + if (isGEX) + state.incoming.expectedPacket = 'KEXDH_GEX_REQ'; + else + state.incoming.expectedPacket = 'KEXDH_INIT'; + } + } + } +} + +function check_KEXINIT(self, init, firstFollows) { + var state = self._state; + var instate = state.incoming; + var outstate = state.outgoing; + var debug = self.debug; + var serverList; + var clientList; + var val; + var len; + var i; + + debug('DEBUG: Comparing KEXINITs ...'); + + var algos = self.config.algorithms; + + var kexList = algos.kex; + if (self.remoteBugs & BUGS.BAD_DHGEX) { + var copied = false; + for (var j = kexList.length - 1; j >= 0; --j) { + if (kexList[j].indexOf('group-exchange') !== -1) { + if (!copied) { + kexList = kexList.slice(); + copied = true; + } + kexList.splice(j, 1); + } + } + } + + debug('DEBUG: (local) KEX algorithms: ' + kexList); + debug('DEBUG: (remote) KEX algorithms: ' + init.algorithms.kex); + if (self.server) { + serverList = kexList; + clientList = init.algorithms.kex; + } else { + serverList = init.algorithms.kex; + clientList = kexList; + } + // Check for agreeable key exchange 
algorithm + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match found! + debug('DEBUG: No matching key exchange algorithm'); + var err = new Error('Handshake failed: no matching key exchange algorithm'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + var kex_algorithm = clientList[i]; + debug('DEBUG: KEX algorithm: ' + kex_algorithm); + if (firstFollows + && (!init.algorithms.kex.length + || kex_algorithm !== init.algorithms.kex[0])) { + // Ignore next incoming packet, it was a wrong first guess at KEX algorithm + instate.ignoreNext = true; + } + + debug('DEBUG: (local) Host key formats: ' + algos.serverHostKey); + debug('DEBUG: (remote) Host key formats: ' + init.algorithms.srvHostKey); + if (self.server) { + serverList = algos.serverHostKey; + clientList = init.algorithms.srvHostKey; + } else { + serverList = init.algorithms.srvHostKey; + clientList = algos.serverHostKey; + } + // Check for agreeable server host key format + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match found! 
+ debug('DEBUG: No matching host key format'); + var err = new Error('Handshake failed: no matching host key format'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + state.hostkeyFormat = clientList[i]; + debug('DEBUG: Host key format: ' + state.hostkeyFormat); + + debug('DEBUG: (local) Client->Server ciphers: ' + algos.cipher); + debug('DEBUG: (remote) Client->Server ciphers: ' + + init.algorithms.cs.encrypt); + if (self.server) { + serverList = algos.cipher; + clientList = init.algorithms.cs.encrypt; + } else { + serverList = init.algorithms.cs.encrypt; + clientList = algos.cipher; + } + // Check for agreeable client->server cipher + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match found! + debug('DEBUG: No matching Client->Server cipher'); + var err = new Error('Handshake failed: no matching client->server cipher'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + if (self.server) { + val = instate.decrypt.type = clientList[i]; + instate.decrypt.isGCM = RE_GCM.test(val); + } else { + val = outstate.encrypt.type = clientList[i]; + outstate.encrypt.isGCM = RE_GCM.test(val); + } + debug('DEBUG: Client->Server Cipher: ' + val); + + debug('DEBUG: (local) Server->Client ciphers: ' + algos.cipher); + debug('DEBUG: (remote) Server->Client ciphers: ' + + (init.algorithms.sc.encrypt)); + if (self.server) { + serverList = algos.cipher; + clientList = init.algorithms.sc.encrypt; + } else { + serverList = init.algorithms.sc.encrypt; + clientList = algos.cipher; + } + // Check for agreeable server->client cipher + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match found! 
+ debug('DEBUG: No matching Server->Client cipher'); + var err = new Error('Handshake failed: no matching server->client cipher'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + if (self.server) { + val = outstate.encrypt.type = clientList[i]; + outstate.encrypt.isGCM = RE_GCM.test(val); + } else { + val = instate.decrypt.type = clientList[i]; + instate.decrypt.isGCM = RE_GCM.test(val); + } + debug('DEBUG: Server->Client Cipher: ' + val); + + debug('DEBUG: (local) Client->Server HMAC algorithms: ' + algos.hmac); + debug('DEBUG: (remote) Client->Server HMAC algorithms: ' + + init.algorithms.cs.mac); + if (self.server) { + serverList = algos.hmac; + clientList = init.algorithms.cs.mac; + } else { + serverList = init.algorithms.cs.mac; + clientList = algos.hmac; + } + // Check for agreeable client->server hmac algorithm + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match found! 
+ debug('DEBUG: No matching Client->Server HMAC algorithm'); + var err = new Error('Handshake failed: no matching client->server HMAC'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + if (self.server) + val = instate.hmac.type = clientList[i]; + else + val = outstate.hmac.type = clientList[i]; + debug('DEBUG: Client->Server HMAC algorithm: ' + val); + + debug('DEBUG: (local) Server->Client HMAC algorithms: ' + algos.hmac); + debug('DEBUG: (remote) Server->Client HMAC algorithms: ' + + init.algorithms.sc.mac); + if (self.server) { + serverList = algos.hmac; + clientList = init.algorithms.sc.mac; + } else { + serverList = init.algorithms.sc.mac; + clientList = algos.hmac; + } + // Check for agreeable server->client hmac algorithm + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match found! + debug('DEBUG: No matching Server->Client HMAC algorithm'); + var err = new Error('Handshake failed: no matching server->client HMAC'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + if (self.server) + val = outstate.hmac.type = clientList[i]; + else + val = instate.hmac.type = clientList[i]; + debug('DEBUG: Server->Client HMAC algorithm: ' + val); + + debug('DEBUG: (local) Client->Server compression algorithms: ' + + algos.compress); + debug('DEBUG: (remote) Client->Server compression algorithms: ' + + init.algorithms.cs.compress); + if (self.server) { + serverList = algos.compress; + clientList = init.algorithms.cs.compress; + } else { + serverList = init.algorithms.cs.compress; + clientList = algos.compress; + } + // Check for agreeable client->server compression algorithm + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match 
found! + debug('DEBUG: No matching Client->Server compression algorithm'); + var err = new Error('Handshake failed: no matching client->server ' + + 'compression algorithm'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + if (self.server) + val = instate.decompress.type = clientList[i]; + else + val = outstate.compress.type = clientList[i]; + debug('DEBUG: Client->Server compression algorithm: ' + val); + + debug('DEBUG: (local) Server->Client compression algorithms: ' + + algos.compress); + debug('DEBUG: (remote) Server->Client compression algorithms: ' + + init.algorithms.sc.compress); + if (self.server) { + serverList = algos.compress; + clientList = init.algorithms.sc.compress; + } else { + serverList = init.algorithms.sc.compress; + clientList = algos.compress; + } + // Check for agreeable server->client compression algorithm + for (i = 0, len = clientList.length; + i < len && serverList.indexOf(clientList[i]) === -1; + ++i); + if (i === len) { + // No suitable match found! 
+ debug('DEBUG: No matching Server->Client compression algorithm'); + var err = new Error('Handshake failed: no matching server->client ' + + 'compression algorithm'); + err.level = 'handshake'; + self.emit('error', err); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + if (self.server) + val = outstate.compress.type = clientList[i]; + else + val = instate.decompress.type = clientList[i]; + debug('DEBUG: Server->Client compression algorithm: ' + val); + + switch (kex_algorithm) { + case 'diffie-hellman-group1-sha1': + state.kexdh = 'group'; + state.kex = crypto.getDiffieHellman('modp2'); + break; + case 'diffie-hellman-group14-sha1': + state.kexdh = 'group'; + state.kex = crypto.getDiffieHellman('modp14'); + break; + case 'ecdh-sha2-nistp256': + state.kexdh = 'ec-sha256'; + state.kex = crypto.createECDH(SSH_TO_OPENSSL[kex_algorithm]); + break; + case 'ecdh-sha2-nistp384': + state.kexdh = 'ec-sha384'; + state.kex = crypto.createECDH(SSH_TO_OPENSSL[kex_algorithm]); + break; + case 'ecdh-sha2-nistp521': + state.kexdh = 'ec-sha512'; + state.kex = crypto.createECDH(SSH_TO_OPENSSL[kex_algorithm]); + break; + default: + if (kex_algorithm === 'diffie-hellman-group-exchange-sha1') + state.kexdh = 'gex-sha1'; + else if (kex_algorithm === 'diffie-hellman-group-exchange-sha256') + state.kexdh = 'gex-sha256'; + // Reset kex object if DH group exchange is selected on re-key and DH + // group exchange was used before the re-key. 
This ensures that we send + // the right DH packet after the KEXINIT exchange + state.kex = undefined; + } + + if (state.kex) { + outstate.pubkey = state.kex.generateKeys(); + var idx = 0; + len = outstate.pubkey.length; + while (outstate.pubkey[idx] === 0x00) { + ++idx; + --len; + } + if (outstate.pubkey[idx] & 0x80) { + var key = new Buffer(len + 1); + key[0] = 0; + outstate.pubkey.copy(key, 1, idx); + outstate.pubkey = key; + } + } + + return true; +} + +function onKEXDH_GEX_GROUP(self, prime, gen) { + var state = self._state; + var outstate = state.outgoing; + + state.kex = crypto.createDiffieHellman(prime, gen); + outstate.pubkey = state.kex.generateKeys(); + var idx = 0; + var len = outstate.pubkey.length; + while (outstate.pubkey[idx] === 0x00) { + ++idx; + --len; + } + if (outstate.pubkey[idx] & 0x80) { + var key = new Buffer(len + 1); + key[0] = 0; + outstate.pubkey.copy(key, 1, idx); + outstate.pubkey = key; + } + KEXDH_INIT(self); +} + +function onKEXDH_INIT(self, e) { // Server + KEXDH_REPLY(self, e); +} + +function onKEXDH_REPLY(self, info, verifiedHost) { // Client + var state = self._state; + var instate = state.incoming; + var outstate = state.outgoing; + var debug = self.debug; + var len; + var i; + + if (verifiedHost === undefined) { + instate.expectedPacket = 'NEWKEYS'; + outstate.sentNEWKEYS = false; + + debug('DEBUG: Checking host key format'); + // Ensure all host key formats agree + var hostkey_format = readString(info.hostkey, 0, 'ascii', self); + if (hostkey_format === false) + return false; + if (info.hostkey_format !== state.hostkeyFormat + || info.hostkey_format !== hostkey_format) { + // Expected and actual server host key format do not match! 
+ debug('DEBUG: Host key format mismatch'); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + self.reset(); + var err = new Error('Handshake failed: host key format mismatch'); + err.level = 'handshake'; + self.emit('error', err); + return false; + } + + debug('DEBUG: Checking signature format'); + // Ensure signature formats agree + var sig_format = readString(info.sig, 0, 'ascii', self); + if (sig_format === false) + return false; + if (info.sig_format !== sig_format) { + debug('DEBUG: Signature format mismatch'); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + self.reset(); + var err = new Error('Handshake failed: signature format mismatch'); + err.level = 'handshake'; + self.emit('error', err); + return false; + } + } + + // Verify the host fingerprint first if needed + if (outstate.status === OUT_INIT) { + if (verifiedHost === undefined) { + debug('DEBUG: Verifying host fingerprint'); + var sync = true; + var emitted = self.emit('fingerprint', info.hostkey, function(permitted) { + // Prevent multiple calls to this callback + if (verifiedHost !== undefined) + return; + verifiedHost = !!permitted; + if (!sync) { + // Continue execution by re-entry + onKEXDH_REPLY(self, info, verifiedHost); + } + }); + sync = false; + // Support async calling of verification callback + if (emitted && verifiedHost === undefined) + return; + } + if (verifiedHost === undefined) + debug('DEBUG: Host accepted by default (no verification)'); + else if (verifiedHost === true) + debug('DEBUG: Host accepted (verified)'); + else { + debug('DEBUG: Host denied via fingerprint verification'); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + self.reset(); + var err = new Error('Handshake failed: ' + + 'host fingerprint verification failed'); + err.level = 'handshake'; + self.emit('error', err); + return false; + } + } + + var slicepos = -1; + for (i = 0, len = info.pubkey.length; i < len; ++i) { + if (info.pubkey[i] === 0) + ++slicepos; + else + break; + } + if 
(slicepos > -1) + info.pubkey = info.pubkey.slice(slicepos + 1); + info.secret = tryComputeSecret(state.kex, info.pubkey); + if (info.secret instanceof Error) { + info.secret.message = 'Error while computing DH secret (' + + state.kexdh + '): ' + + info.secret.message; + info.secret.level = 'handshake'; + self.emit('error', info.secret); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + var hashAlgo; + if (state.kexdh === 'group') + hashAlgo = 'sha1'; + else + hashAlgo = RE_KEX_HASH.exec(state.kexdh)[1]; + var hash = crypto.createHash(hashAlgo); + + var len_ident = Buffer.byteLength(self.config.ident); + var len_sident = Buffer.byteLength(instate.identRaw); + var len_init = outstate.kexinit.length; + var len_sinit = instate.kexinit.length; + var len_hostkey = info.hostkey.length; + var len_pubkey = outstate.pubkey.length; + var len_spubkey = info.pubkey.length; + var len_secret = info.secret.length; + + var idx_pubkey = 0; + var idx_spubkey = 0; + var idx_secret = 0; + + while (outstate.pubkey[idx_pubkey] === 0x00) { + ++idx_pubkey; + --len_pubkey; + } + while (info.pubkey[idx_spubkey] === 0x00) { + ++idx_spubkey; + --len_spubkey; + } + while (info.secret[idx_secret] === 0x00) { + ++idx_secret; + --len_secret; + } + if (outstate.pubkey[idx_pubkey] & 0x80) + ++len_pubkey; + if (info.pubkey[idx_spubkey] & 0x80) + ++len_spubkey; + if (info.secret[idx_secret] & 0x80) + ++len_secret; + + var exchangeBufLen = len_ident + + len_sident + + len_init + + len_sinit + + len_hostkey + + len_pubkey + + len_spubkey + + len_secret + + (4 * 8); // Length fields for above values + + // Group exchange-related + var isGEX = RE_GEX.test(state.kexdh); + var len_gex_prime = 0; + var len_gex_gen = 0; + var idx_gex_prime = 0; + var idx_gex_gen = 0; + var gex_prime; + var gex_gen; + if (isGEX) { + gex_prime = state.kex.getPrime(); + gex_gen = state.kex.getGenerator(); + len_gex_prime = gex_prime.length; + len_gex_gen = gex_gen.length; + while 
(gex_prime[idx_gex_prime] === 0x00) { + ++idx_gex_prime; + --len_gex_prime; + } + while (gex_gen[idx_gex_gen] === 0x00) { + ++idx_gex_gen; + --len_gex_gen; + } + if (gex_prime[idx_gex_prime] & 0x80) + ++len_gex_prime; + if (gex_gen[idx_gex_gen] & 0x80) + ++len_gex_gen; + exchangeBufLen += (4 * 3); // min, n, max values + exchangeBufLen += (4 * 2); // prime, generator length fields + exchangeBufLen += len_gex_prime; + exchangeBufLen += len_gex_gen; + } + + + var bp = 0; + var exchangeBuf = new Buffer(exchangeBufLen); + + exchangeBuf.writeUInt32BE(len_ident, bp, true); + bp += 4; + exchangeBuf.write(self.config.ident, bp, 'utf8'); // V_C + bp += len_ident; + + exchangeBuf.writeUInt32BE(len_sident, bp, true); + bp += 4; + exchangeBuf.write(instate.identRaw, bp, 'utf8'); // V_S + bp += len_sident; + + exchangeBuf.writeUInt32BE(len_init, bp, true); + bp += 4; + outstate.kexinit.copy(exchangeBuf, bp); // I_C + bp += len_init; + outstate.kexinit = undefined; + + exchangeBuf.writeUInt32BE(len_sinit, bp, true); + bp += 4; + instate.kexinit.copy(exchangeBuf, bp); // I_S + bp += len_sinit; + instate.kexinit = undefined; + + exchangeBuf.writeUInt32BE(len_hostkey, bp, true); + bp += 4; + info.hostkey.copy(exchangeBuf, bp); // K_S + bp += len_hostkey; + + if (isGEX) { + KEXDH_GEX_REQ_PACKET.slice(1).copy(exchangeBuf, bp); // min, n, max + bp += (4 * 3); // Skip over bytes just copied + + exchangeBuf.writeUInt32BE(len_gex_prime, bp, true); + bp += 4; + if (gex_prime[idx_gex_prime] & 0x80) + exchangeBuf[bp++] = 0; + gex_prime.copy(exchangeBuf, bp, idx_gex_prime); // p + bp += len_gex_prime - (gex_prime[idx_gex_prime] & 0x80 ? 1 : 0); + + exchangeBuf.writeUInt32BE(len_gex_gen, bp, true); + bp += 4; + if (gex_gen[idx_gex_gen] & 0x80) + exchangeBuf[bp++] = 0; + gex_gen.copy(exchangeBuf, bp, idx_gex_gen); // g + bp += len_gex_gen - (gex_gen[idx_gex_gen] & 0x80 ? 
1 : 0); + } + + exchangeBuf.writeUInt32BE(len_pubkey, bp, true); + bp += 4; + if (outstate.pubkey[idx_pubkey] & 0x80) + exchangeBuf[bp++] = 0; + outstate.pubkey.copy(exchangeBuf, bp, idx_pubkey); // e + bp += len_pubkey - (outstate.pubkey[idx_pubkey] & 0x80 ? 1 : 0); + + exchangeBuf.writeUInt32BE(len_spubkey, bp, true); + bp += 4; + if (info.pubkey[idx_spubkey] & 0x80) + exchangeBuf[bp++] = 0; + info.pubkey.copy(exchangeBuf, bp, idx_spubkey); // f + bp += len_spubkey - (info.pubkey[idx_spubkey] & 0x80 ? 1 : 0); + + exchangeBuf.writeUInt32BE(len_secret, bp, true); + bp += 4; + if (info.secret[idx_secret] & 0x80) + exchangeBuf[bp++] = 0; + info.secret.copy(exchangeBuf, bp, idx_secret); // K + + outstate.exchangeHash = hash.update(exchangeBuf).digest(); // H + + var rawsig = readString(info.sig, info.sig._pos, self); // s + if (rawsig === false) + return false; + + var keyAlgo; + switch (info.sig_format) { + case 'ssh-rsa': + keyAlgo = 'RSA-SHA1'; + break; + case 'ssh-dss': + keyAlgo = 'DSA-SHA1'; + break; + case 'ecdsa-sha2-nistp256': + keyAlgo = 'sha256'; + break; + case 'ecdsa-sha2-nistp384': + keyAlgo = 'sha384'; + break; + case 'ecdsa-sha2-nistp521': + keyAlgo = 'sha512'; + break; + default: + debug('DEBUG: Signature format unsupported: ' + info.sig_format); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + self.reset(); + var err = new Error('Handshake failed: signature format unsupported: ' + + info.sig_format); + err.level = 'handshake'; + self.emit('error', err); + return false; + } + var verifier = crypto.createVerify(keyAlgo); + verifier.update(outstate.exchangeHash); + + var asn1KeyBuf; + if (keyAlgo === 'RSA-SHA1') { + asn1KeyBuf = RSAKeySSHToASN1(info.hostkey, self); + } else if (keyAlgo === 'DSA-SHA1') { + asn1KeyBuf = DSAKeySSHToASN1(info.hostkey, self); + rawsig = DSASigBareToBER(rawsig); + } else { + // ECDSA + asn1KeyBuf = ECDSAKeySSHToASN1(info.hostkey, self); + rawsig = ECDSASigSSHToASN1(rawsig, self); + } + + if (!asn1KeyBuf || !rawsig) 
+ return false; + + debug('DEBUG: Verifying signature'); + + var b64key = asn1KeyBuf.toString('base64').replace(/(.{64})/g, '$1\n'); + var fullkey = '-----BEGIN PUBLIC KEY-----\n' + + b64key + + (b64key[b64key.length - 1] === '\n' ? '' : '\n') + + '-----END PUBLIC KEY-----'; + + var verified = verifier.verify(fullkey, rawsig); + + if (!verified) { + debug('DEBUG: Signature verification failed'); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + self.reset(); + var err = new Error('Handshake failed: signature verification failed'); + err.level = 'handshake'; + self.emit('error', err); + return false; + } + + if (outstate.sessionId === undefined) + outstate.sessionId = outstate.exchangeHash; + outstate.kexsecret = info.secret; + + debug('DEBUG: Outgoing: Writing NEWKEYS'); + if (outstate.status === OUT_REKEYING) + send(self, NEWKEYS_PACKET, undefined, true); + else + send(self, NEWKEYS_PACKET); + outstate.sentNEWKEYS = true; + + if (verifiedHost !== undefined && instate.expectedPacket === undefined) { + // We received NEWKEYS while we were waiting for the fingerprint + // verification callback to be called. In this case we have to re-execute + // onNEWKEYS to finish the handshake. + onNEWKEYS(self); + } +} + +function onNEWKEYS(self) { // Client/Server + var state = self._state; + var outstate = state.outgoing; + var instate = state.incoming; + + instate.expectedPacket = undefined; + + if (!outstate.sentNEWKEYS) + return; + + var idx_secret = 0; + var len = outstate.kexsecret.length; + while (outstate.kexsecret[idx_secret] === 0x00) { + ++idx_secret; + --len; + } + + var blocklen = 8; + var keylen = 0; + var p = 0; + + var dhHashAlgo; + if (state.kexdh === 'group') + dhHashAlgo = 'sha1'; + else + dhHashAlgo = RE_KEX_HASH.exec(state.kexdh)[1]; + + var len_secret = (outstate.kexsecret[idx_secret] & 0x80 ? 
1 : 0) + len; + var secret = new Buffer(4 + len_secret); + var iv; + var key; + + // Whenever the client sends a new authentication request, it is enqueued + // here. Once the request is resolved (success, fail, or PK_OK), + // dequeue. Whatever is at the front of the queue determines how we + // interpret packet type 60. + state.authsQueue = []; + + secret.writeUInt32BE(len_secret, p, true); + p += 4; + if (outstate.kexsecret[idx_secret] & 0x80) + secret[p++] = 0; + outstate.kexsecret.copy(secret, p, idx_secret); + outstate.kexsecret = undefined; + if (!isStreamCipher(outstate.encrypt.type)) { + iv = crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(!self.server ? 'A' : 'B', 'ascii') + .update(outstate.sessionId) + .digest(); + switch (outstate.encrypt.type) { + case 'aes128-gcm': + case 'aes256-gcm': + case 'aes128-gcm@openssh.com': + case 'aes256-gcm@openssh.com': + blocklen = 12; + break; + case 'aes256-cbc': + case 'aes192-cbc': + case 'aes128-cbc': + case 'aes256-ctr': + case 'aes192-ctr': + case 'aes128-ctr': + blocklen = 16; + } + outstate.encrypt.size = blocklen; + while (blocklen > iv.length) { + iv = Buffer.concat([iv, + crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(iv) + .digest()]); + } + iv = iv.slice(0, blocklen); + } else { + outstate.encrypt.size = blocklen; + iv = EMPTY_BUFFER; // Streaming ciphers don't use an IV upfront + } + switch (outstate.encrypt.type) { + case 'aes256-gcm': + case 'aes256-gcm@openssh.com': + case 'aes256-cbc': + case 'aes256-ctr': + case 'arcfour256': + keylen = 32; + break; + case '3des-cbc': + case '3des-ctr': + case 'aes192-cbc': + case 'aes192-ctr': + keylen = 24; + break; + case 'aes128-gcm': + case 'aes128-gcm@openssh.com': + case 'aes128-cbc': + case 'aes128-ctr': + case 'cast128-cbc': + case 'blowfish-cbc': + case 'arcfour': + case 'arcfour128': + keylen = 16; + break; + } + + key = crypto.createHash(dhHashAlgo) + .update(secret) + 
.update(outstate.exchangeHash) + .update(!self.server ? 'C' : 'D', 'ascii') + .update(outstate.sessionId) + .digest(); + while (keylen > key.length) { + key = Buffer.concat([key, + crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(key) + .digest()]); + } + key = key.slice(0, keylen); + + if (outstate.encrypt.isGCM) { + outstate.encrypt.size = 16; + outstate.encrypt.iv = iv; + outstate.encrypt.key = key; + outstate.encrypt.instance = true; + } else { + var cipherAlgo = SSH_TO_OPENSSL[outstate.encrypt.type]; + outstate.encrypt.instance = crypto.createCipheriv(cipherAlgo, key, iv); + outstate.encrypt.instance.setAutoPadding(false); + } + + // And now for decrypting ... + + blocklen = 8; + keylen = 0; + if (!isStreamCipher(instate.decrypt.type)) { + iv = crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(!self.server ? 'B' : 'A', 'ascii') + .update(outstate.sessionId) + .digest(); + switch (instate.decrypt.type) { + case 'aes128-gcm': + case 'aes256-gcm': + case 'aes128-gcm@openssh.com': + case 'aes256-gcm@openssh.com': + blocklen = 12; + break; + case 'aes256-cbc': + case 'aes192-cbc': + case 'aes128-cbc': + case 'aes256-ctr': + case 'aes192-ctr': + case 'aes128-ctr': + blocklen = 16; + } + if (instate.decrypt.isGCM) + instate.decrypt.size = 16; + else + instate.decrypt.size = blocklen; + while (blocklen > iv.length) { + iv = Buffer.concat([iv, + crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(iv) + .digest()]); + } + iv = iv.slice(0, blocklen); + } else { + instate.decrypt.size = blocklen; + iv = EMPTY_BUFFER; // Streaming ciphers don't use an IV upfront + } + + // Create a reusable buffer for decryption purposes + instate.decrypt.buf = new Buffer(instate.decrypt.size); + + switch (instate.decrypt.type) { + case 'aes256-gcm': + case 'aes256-gcm@openssh.com': + case 'aes256-cbc': + case 'aes256-ctr': + case 'arcfour256': + keylen = 32; + break; + 
case '3des-cbc': + case '3des-ctr': + case 'aes192-cbc': + case 'aes192-ctr': + keylen = 24; + break; + case 'aes128-gcm': + case 'aes128-gcm@openssh.com': + case 'aes128-cbc': + case 'aes128-ctr': + case 'cast128-cbc': + case 'blowfish-cbc': + case 'arcfour': + case 'arcfour128': + keylen = 16; + break; + } + key = crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(!self.server ? 'D' : 'C', 'ascii') + .update(outstate.sessionId) + .digest(); + while (keylen > key.length) { + key = Buffer.concat([key, + crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(key) + .digest()]); + } + key = key.slice(0, keylen); + + var decipherAlgo = SSH_TO_OPENSSL[instate.decrypt.type]; + instate.decrypt.instance = crypto.createDecipheriv(decipherAlgo, key, iv); + instate.decrypt.instance.setAutoPadding(false); + instate.decrypt.iv = iv; + instate.decrypt.key = key; + + /* The "arcfour128" algorithm is the RC4 cipher, as described in + [SCHNEIER], using a 128-bit key. The first 1536 bytes of keystream + generated by the cipher MUST be discarded, and the first byte of the + first encrypted packet MUST be encrypted using the 1537th byte of + keystream. 
+ + -- http://tools.ietf.org/html/rfc4345#section-4 */ + var emptyBuf; + if (outstate.encrypt.type.substr(0, 7) === 'arcfour') { + emptyBuf = new Buffer(1536); + emptyBuf.fill(0); + outstate.encrypt.instance.update(emptyBuf); + } + if (instate.decrypt.type.substr(0, 7) === 'arcfour') { + emptyBuf = new Buffer(1536); + emptyBuf.fill(0); + instate.decrypt.instance.update(emptyBuf); + } + + var createKeyLen = 0; + var checkKeyLen = 0; + switch (outstate.hmac.type) { + case 'hmac-ripemd160': + case 'hmac-sha1': + createKeyLen = 20; + outstate.hmac.size = 20; + break; + case 'hmac-sha1-96': + createKeyLen = 20; + outstate.hmac.size = 12; + break; + case 'hmac-sha2-256': + createKeyLen = 32; + outstate.hmac.size = 32; + break; + case 'hmac-sha2-256-96': + createKeyLen = 32; + outstate.hmac.size = 12; + break; + case 'hmac-sha2-512': + createKeyLen = 64; + outstate.hmac.size = 64; + break; + case 'hmac-sha2-512-96': + createKeyLen = 64; + outstate.hmac.size = 12; + break; + case 'hmac-md5': + createKeyLen = 16; + outstate.hmac.size = 16; + break; + case 'hmac-md5-96': + createKeyLen = 16; + outstate.hmac.size = 12; + break; + } + switch (instate.hmac.type) { + case 'hmac-ripemd160': + case 'hmac-sha1': + checkKeyLen = 20; + instate.hmac.size = 20; + break; + case 'hmac-sha1-96': + checkKeyLen = 20; + instate.hmac.size = 12; + break; + case 'hmac-sha2-256': + checkKeyLen = 32; + instate.hmac.size = 32; + break; + case 'hmac-sha2-256-96': + checkKeyLen = 32; + instate.hmac.size = 12; + break; + case 'hmac-sha2-512': + checkKeyLen = 64; + instate.hmac.size = 64; + break; + case 'hmac-sha2-512-96': + checkKeyLen = 64; + instate.hmac.size = 12; + break; + case 'hmac-md5': + checkKeyLen = 16; + instate.hmac.size = 16; + break; + case 'hmac-md5-96': + checkKeyLen = 16; + instate.hmac.size = 12; + break; + } + + if (!outstate.encrypt.isGCM) { + key = crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(!self.server ? 
'E' : 'F', 'ascii') + .update(outstate.sessionId) + .digest(); + while (createKeyLen > key.length) { + key = Buffer.concat([key, + crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(key) + .digest()]); + } + outstate.hmac.key = key.slice(0, createKeyLen); + } else + outstate.hmac.key = undefined; + if (!instate.decrypt.isGCM) { + key = crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(!self.server ? 'F' : 'E', 'ascii') + .update(outstate.sessionId) + .digest(); + while (checkKeyLen > key.length) { + key = Buffer.concat([key, + crypto.createHash(dhHashAlgo) + .update(secret) + .update(outstate.exchangeHash) + .update(key) + .digest()]); + } + instate.hmac.key = key.slice(0, checkKeyLen); + } else { + instate.hmac.key = undefined; + instate.hmac.size = 16; + } + + outstate.exchangeHash = undefined; + + // Create a reusable buffer for message verification purposes + if (!instate.hmac.buf + || instate.hmac.buf.length !== instate.hmac.size) + instate.hmac.buf = new Buffer(instate.hmac.size); + + if (outstate.compress.type === 'zlib') + outstate.compress.instance = zlib.createDeflate(ZLIB_OPTS); + else if (outstate.compress.type === 'none') + outstate.compress.instance = false; + if (instate.decompress.type === 'zlib') + instate.decompress.instance = zlib.createInflate(ZLIB_OPTS); + else if (instate.decompress.type === 'none') + instate.decompress.instance = false; + + self.bytesSent = self.bytesReceived = 0; + + if (outstate.status === OUT_REKEYING) { + outstate.status = OUT_READY; + + // Empty our outbound buffer of any data we tried to send during the + // re-keying process + var queue = outstate.rekeyQueue; + var qlen = queue.length; + var q = 0; + + outstate.rekeyQueue = []; + + for (; q < qlen; ++q) { + if (Buffer.isBuffer(queue[q])) + send(self, queue[q]); + else + send(self, queue[q][0], queue[q][1]); + } + + // Now empty our inbound buffer of any non-transport layer packets we + // 
received during the re-keying process + queue = instate.rekeyQueue; + qlen = queue.length; + q = 0; + + instate.rekeyQueue = []; + + var curSeqno = instate.seqno; + for (; q < qlen; ++q) { + instate.seqno = queue[q][0]; + instate.payload = queue[q][1]; + if (parsePacket(self) === false) + return; + + if (instate.status === IN_INIT) { + // We were reset due to some error/disagreement ? + return; + } + } + instate.seqno = curSeqno; + } else { + outstate.status = OUT_READY; + if (instate.status === IN_PACKET) { + // Explicitly update incoming packet parser status in order to get the + // correct decipher, hmac, etc. states. + + // We only get here if the host fingerprint callback was called + // asynchronously and the incoming packet parser is still expecting an + // unencrypted packet, etc. + + self.debug('DEBUG: Parser: IN_PACKETBEFORE (update) (expecting ' + + instate.decrypt.size + ')'); + // Wait for the right number of bytes so we can determine the incoming + // packet length + expectData(self, + EXP_TYPE_BYTES, + instate.decrypt.size, + instate.decrypt.buf); + } + self.emit('ready'); + } +} + +function parsePacket(self, callback) { + var instate = self._state.incoming; + var outstate = self._state.outgoing; + var payload = instate.payload; + var seqno = instate.seqno; + var serviceName; + var lang; + var message; + var info; + var chan; + var data; + var srcIP; + var srcPort; + var sender; + var window; + var packetSize; + var recipient; + var description; + var socketPath; + + if (++instate.seqno > MAX_SEQNO) + instate.seqno = 0; + + if (instate.ignoreNext) { + self.debug('DEBUG: Parser: Packet ignored'); + instate.ignoreNext = false; + return; + } + + var type = payload[0]; + if (type === undefined) + return false; + + // If we receive a packet during handshake that is not the expected packet + // and it is not one of: DISCONNECT, IGNORE, UNIMPLEMENTED, or DEBUG, then we + // close the stream + if (outstate.status !== OUT_READY + && MESSAGE[type] !== 
instate.expectedPacket + && type < 1 + && type > 4) { + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, expected: ' + + instate.expectedPacket + + ' but got: ' + + MESSAGE[type]); + // XXX: Potential issue where the module user decides to initiate a rekey + // via KEXINIT() (which sets `expectedPacket`) after receiving a packet + // and there is still another packet already waiting to be parsed at the + // time the KEXINIT is written. this will cause an unexpected disconnect... + self.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR); + var err = new Error('Received unexpected packet'); + err.level = 'protocol'; + self.emit('error', err); + return false; + } + + if (type === MESSAGE.CHANNEL_DATA) { + /* + byte SSH_MSG_CHANNEL_DATA + uint32 recipient channel + string data + */ + chan = readInt(payload, 1, self, callback); + if (chan === false) + return false; + // TODO: MAX_CHAN_DATA_LEN here should really be dependent upon the + // channel's packet size. The ssh2 module uses 32KB, so we'll hard + // code this for now ... 
+ data = readString(payload, 5, self, callback, 32768); + if (data === false) + return false; + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: CHANNEL_DATA (' + + chan + + ')'); + self.emit('CHANNEL_DATA:' + chan, data); + } else if (type === MESSAGE.CHANNEL_EXTENDED_DATA) { + /* + byte SSH_MSG_CHANNEL_EXTENDED_DATA + uint32 recipient channel + uint32 data_type_code + string data + */ + chan = readInt(payload, 1, self, callback); + if (chan === false) + return false; + var dataType = readInt(payload, 5, self, callback); + if (dataType === false) + return false; + data = readString(payload, 9, self, callback); + if (data === false) + return false; + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: ' + + 'CHANNEL_EXTENDED_DATA (' + + chan + + ')'); + self.emit('CHANNEL_EXTENDED_DATA:' + chan, dataType, data); + } else if (type === MESSAGE.CHANNEL_WINDOW_ADJUST) { + /* + byte SSH_MSG_CHANNEL_WINDOW_ADJUST + uint32 recipient channel + uint32 bytes to add + */ + chan = readInt(payload, 1, self, callback); + if (chan === false) + return false; + var bytesToAdd = readInt(payload, 5, self, callback); + if (bytesToAdd === false) + return false; + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: ' + + 'CHANNEL_WINDOW_ADJUST (' + + chan + + ', ' + + bytesToAdd + + ')'); + self.emit('CHANNEL_WINDOW_ADJUST:' + chan, bytesToAdd); + } else if (type === MESSAGE.CHANNEL_SUCCESS) { + /* + byte SSH_MSG_CHANNEL_SUCCESS + uint32 recipient channel + */ + chan = readInt(payload, 1, self, callback); + if (chan === false) + return false; + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: CHANNEL_SUCCESS (' + + chan + + ')'); + self.emit('CHANNEL_SUCCESS:' + chan); + } else if (type === MESSAGE.CHANNEL_FAILURE) { + /* + byte SSH_MSG_CHANNEL_FAILURE + uint32 recipient channel + */ + chan = readInt(payload, 1, self, callback); + if (chan === false) + return false; + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: CHANNEL_FAILURE (' + + chan + + ')'); + 
self.emit('CHANNEL_FAILURE:' + chan); + } else if (type === MESSAGE.CHANNEL_EOF) { + /* + byte SSH_MSG_CHANNEL_EOF + uint32 recipient channel + */ + chan = readInt(payload, 1, self, callback); + if (chan === false) + return false; + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: CHANNEL_EOF (' + + chan + + ')'); + self.emit('CHANNEL_EOF:' + chan); + } else if (type === MESSAGE.CHANNEL_OPEN) { + /* + byte SSH_MSG_CHANNEL_OPEN + string channel type in US-ASCII only + uint32 sender channel + uint32 initial window size + uint32 maximum packet size + .... channel type specific data follows + */ + var chanType = readString(payload, 1, 'ascii', self, callback); + if (chanType === false) + return false; + sender = readInt(payload, payload._pos, self, callback); + if (sender === false) + return false; + window = readInt(payload, payload._pos += 4, self, callback); + if (window === false) + return false; + packetSize = readInt(payload, payload._pos += 4, self, callback); + if (packetSize === false) + return false; + var channel; + + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: CHANNEL_OPEN (' + + sender + + ', ' + + chanType + + ')'); + + if (chanType === 'forwarded-tcpip' // Server->Client + || chanType === 'direct-tcpip') { // Client->Server + /* + string address that was connected / host to connect + uint32 port that was connected / port to connect + string originator IP address + uint32 originator port + */ + var destIP = readString(payload, + payload._pos += 4, + 'ascii', + self, + callback); + if (destIP === false) + return false; + var destPort = readInt(payload, payload._pos, self, callback); + if (destPort === false) + return false; + srcIP = readString(payload, payload._pos += 4, 'ascii', self, callback); + if (srcIP === false) + return false; + srcPort = readInt(payload, payload._pos, self, callback); + if (srcPort === false) + return false; + channel = { + type: chanType, + sender: sender, + window: window, + packetSize: packetSize, + data: { + 
destIP: destIP, + destPort: destPort, + srcIP: srcIP, + srcPort: srcPort + } + }; + } else if (// Server->Client + chanType === 'forwarded-streamlocal@openssh.com' + // Client->Server + || chanType === 'direct-streamlocal@openssh.com') { + /* + string socket path + string reserved for future use + */ + socketPath = readString(payload, + payload._pos += 4, + 'utf8', + self, + callback); + if (socketPath === false) + return false; + channel = { + type: chanType, + sender: sender, + window: window, + packetSize: packetSize, + data: { + socketPath: socketPath, + } + }; + } else if (chanType === 'x11') { // Server->Client + /* + string originator address (e.g., "192.168.7.38") + uint32 originator port + */ + srcIP = readString(payload, payload._pos += 4, 'ascii', self, callback); + if (srcIP === false) + return false; + srcPort = readInt(payload, payload._pos, self, callback); + if (srcPort === false) + return false; + channel = { + type: chanType, + sender: sender, + window: window, + packetSize: packetSize, + data: { + srcIP: srcIP, + srcPort: srcPort + } + }; + } else { + // 'session' (Client->Server), 'auth-agent@openssh.com' (Server->Client) + channel = { + type: chanType, + sender: sender, + window: window, + packetSize: packetSize, + data: {} + }; + } + + self.emit('CHANNEL_OPEN', channel); + } else if (type === MESSAGE.CHANNEL_OPEN_CONFIRMATION) { + /* + byte SSH_MSG_CHANNEL_OPEN_CONFIRMATION + uint32 recipient channel + uint32 sender channel + uint32 initial window size + uint32 maximum packet size + .... channel type specific data follows + */ + // "The 'recipient channel' is the channel number given in the + // original open request, and 'sender channel' is the channel number + // allocated by the other side." 
+ recipient = readInt(payload, 1, self, callback); + if (recipient === false) + return false; + sender = readInt(payload, 5, self, callback); + if (sender === false) + return false; + window = readInt(payload, 9, self, callback); + if (window === false) + return false; + packetSize = readInt(payload, 13, self, callback); + if (packetSize === false) + return false; + + info = { + recipient: recipient, + sender: sender, + window: window, + packetSize: packetSize + }; + + if (payload.length > 17) + info.data = payload.slice(17); + + self.emit('CHANNEL_OPEN_CONFIRMATION:' + info.recipient, info); + } else if (type === MESSAGE.CHANNEL_OPEN_FAILURE) { + /* + byte SSH_MSG_CHANNEL_OPEN_FAILURE + uint32 recipient channel + uint32 reason code + string description in ISO-10646 UTF-8 encoding + string language tag + */ + recipient = readInt(payload, 1, self, callback); + if (recipient === false) + return false; + var reasonCode = readInt(payload, 5, self, callback); + if (reasonCode === false) + return false; + description = readString(payload, 9, 'utf8', self, callback); + if (description === false) + return false; + lang = readString(payload, payload._pos, 'utf8', self, callback); + if (lang === false) + return false; + payload._pos = 9; + info = { + recipient: recipient, + reasonCode: reasonCode, + reason: CHANNEL_OPEN_FAILURE[reasonCode], + description: description, + lang: lang + }; + + self.emit('CHANNEL_OPEN_FAILURE:' + info.recipient, info); + } else if (type === MESSAGE.CHANNEL_CLOSE) { + /* + byte SSH_MSG_CHANNEL_CLOSE + uint32 recipient channel + */ + chan = readInt(payload, 1, self, callback); + if (chan === false) + return false; + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: CHANNEL_CLOSE (' + + chan + + ')'); + self.emit('CHANNEL_CLOSE:' + chan); + } else if (type === MESSAGE.IGNORE) { + /* + byte SSH_MSG_IGNORE + string data + */ + } else if (type === MESSAGE.DISCONNECT) { + /* + byte SSH_MSG_DISCONNECT + uint32 reason code + string description in 
ISO-10646 UTF-8 encoding + string language tag + */ + var reason = readInt(payload, 1, self, callback); + if (reason === false) + return false; + var reasonText = DISCONNECT_REASON[reason]; + description = readString(payload, 5, 'utf8', self, callback); + if (description === false) + return false; + + if (payload._pos < payload.length) + lang = readString(payload, payload._pos, 'ascii', self, callback); + + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: DISCONNECT (' + + reasonText + + ')'); + + self.emit('DISCONNECT', reasonText, reason, description, lang); + } else if (type === MESSAGE.DEBUG) { + /* + byte SSH_MSG_DEBUG + boolean always_display + string message in ISO-10646 UTF-8 encoding + string language tag + */ + message = readString(payload, 2, 'utf8', self, callback); + if (message === false) + return false; + lang = readString(payload, payload._pos, 'ascii', self, callback); + if (lang === false) + return false; + + self.emit('DEBUG', message, lang); + } else if (type === MESSAGE.NEWKEYS) { + /* + byte SSH_MSG_NEW_KEYS + */ + self.emit('NEWKEYS'); + } else if (type === MESSAGE.SERVICE_REQUEST) { + /* + byte SSH_MSG_SERVICE_REQUEST + string service name + */ + serviceName = readString(payload, 1, 'ascii', self, callback); + if (serviceName === false) + return false; + + self.emit('SERVICE_REQUEST', serviceName); + } else if (type === MESSAGE.SERVICE_ACCEPT) { + /* + byte SSH_MSG_SERVICE_ACCEPT + string service name + */ + serviceName = readString(payload, 1, 'ascii', self, callback); + if (serviceName === false) + return false; + + self.emit('SERVICE_ACCEPT', serviceName); + } else if (type === MESSAGE.USERAUTH_REQUEST) { + /* + byte SSH_MSG_USERAUTH_REQUEST + string user name in ISO-10646 UTF-8 encoding [RFC3629] + string service name in US-ASCII + string method name in US-ASCII + .... 
method specific fields + */ + var username = readString(payload, 1, 'utf8', self, callback); + if (username === false) + return false; + var svcName = readString(payload, payload._pos, 'ascii', self, callback); + if (svcName === false) + return false; + var method = readString(payload, payload._pos, 'ascii', self, callback); + if (method === false) + return false; + var methodData; + + if (method === 'password') { + methodData = readString(payload, + payload._pos + 1, + 'utf8', + self, + callback); + if (methodData === false) + return false; + } else if (method === 'publickey' || method === 'hostbased') { + var pkSigned; + var keyAlgo; + var key; + var signature; + var blob; + var hostname; + var userlocal; + if (method === 'publickey') { + pkSigned = payload[payload._pos++]; + if (pkSigned === undefined) + return false; + pkSigned = (pkSigned !== 0); + } + keyAlgo = readString(payload, payload._pos, 'ascii', self, callback); + if (keyAlgo === false) + return false; + key = readString(payload, payload._pos, self, callback); + if (key === false) + return false; + + if (pkSigned || method === 'hostbased') { + if (method === 'hostbased') { + hostname = readString(payload, payload._pos, 'ascii', self, callback); + if (hostname === false) + return false; + userlocal = readString(payload, payload._pos, 'utf8', self, callback); + if (userlocal === false) + return false; + } + + var blobEnd = payload._pos; + signature = readString(payload, blobEnd, self, callback); + if (signature === false) + return false; + + if (signature.length > (4 + keyAlgo.length + 4) + && signature.toString('ascii', 4, 4 + keyAlgo.length) === keyAlgo) { + // Skip algoLen + algo + sigLen + signature = signature.slice(4 + keyAlgo.length + 4); + } + + if (keyAlgo === 'ssh-dss') { + signature = DSASigBareToBER(signature); + } else if (keyAlgo !== 'ssh-rsa' && keyAlgo !== 'ssh-dss') { + // ECDSA + signature = ECDSASigSSHToASN1(signature, self, callback); + if (signature === false) + return false; + } + 
+ blob = new Buffer(4 + outstate.sessionId.length + blobEnd); + blob.writeUInt32BE(outstate.sessionId.length, 0, true); + outstate.sessionId.copy(blob, 4); + payload.copy(blob, 4 + outstate.sessionId.length, 0, blobEnd); + } + + methodData = { + keyAlgo: keyAlgo, + key: key, + signature: signature, + blob: blob, + localHostname: hostname, + localUsername: userlocal + }; + } else if (method === 'keyboard-interactive') { + // Skip language, it's deprecated + var skipLen = readInt(payload, payload._pos, self, callback); + if (skipLen === false) + return false; + methodData = readString(payload, + payload._pos + 4 + skipLen, + 'utf8', + self, + callback); + if (methodData === false) + return false; + } else if (method !== 'none') + methodData = payload.slice(payload._pos); + + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: USERAUTH_REQUEST (' + + method + + ')'); + + self._state.authsQueue.push(method); + self.emit('USERAUTH_REQUEST', username, svcName, method, methodData); + } else if (type === MESSAGE.USERAUTH_SUCCESS) { + /* + byte SSH_MSG_USERAUTH_SUCCESS + */ + if (outstate.compress.type === 'zlib@openssh.com') + outstate.compress.instance = zlib.createDeflate(ZLIB_OPTS); + if (instate.decompress.type === 'zlib@openssh.com') + instate.decompress.instance = zlib.createInflate(ZLIB_OPTS); + self._state.authsQueue.shift(); + self.emit('USERAUTH_SUCCESS'); + } else if (type === MESSAGE.USERAUTH_FAILURE) { + /* + byte SSH_MSG_USERAUTH_FAILURE + name-list authentications that can continue + boolean partial success + */ + var auths = readString(payload, 1, 'ascii', self, callback); + if (auths === false) + return false; + var partSuccess = payload[payload._pos]; + if (partSuccess === undefined) + return false; + + partSuccess = (partSuccess !== 0); + auths = auths.split(','); + + self._state.authsQueue.shift(); + self.emit('USERAUTH_FAILURE', auths, partSuccess); + } else if (type === MESSAGE.USERAUTH_BANNER) { + /* + byte SSH_MSG_USERAUTH_BANNER + string message 
in ISO-10646 UTF-8 encoding + string language tag + */ + message = readString(payload, 1, 'utf8', self, callback); + if (message === false) + return false; + lang = readString(payload, payload._pos, 'utf8', self, callback); + if (lang === false) + return false; + + self.emit('USERAUTH_BANNER', message, lang); + } else if (type === MESSAGE.GLOBAL_REQUEST) { + /* + byte SSH_MSG_GLOBAL_REQUEST + string request name in US-ASCII only + boolean want reply + .... request-specific data follows + */ + var request = readString(payload, 1, 'ascii', self, callback); + if (request === false) { + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: GLOBAL_REQUEST'); + return false; + } + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: GLOBAL_REQUEST (' + + request + + ')'); + + var wantReply = payload[payload._pos++]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + + var reqData; + if (request === 'tcpip-forward' || request === 'cancel-tcpip-forward') { + var bindAddr = readString(payload, payload._pos, 'ascii', self, callback); + if (bindAddr === false) + return false; + var bindPort = readInt(payload, payload._pos, self, callback); + if (bindPort === false) + return false; + reqData = { + bindAddr: bindAddr, + bindPort: bindPort + }; + } else if (request === 'streamlocal-forward@openssh.com' + || request === 'cancel-streamlocal-forward@openssh.com') { + socketPath = readString(payload, payload._pos, 'utf8', self, callback); + if (socketPath === false) + return false; + reqData = { + socketPath: socketPath + }; + } else if (request === 'no-more-sessions@openssh.com') { + // No data + } else { + reqData = payload.slice(payload._pos); + } + + self.emit('GLOBAL_REQUEST', request, wantReply, reqData); + } else if (type === MESSAGE.REQUEST_SUCCESS) { + /* + byte SSH_MSG_REQUEST_SUCCESS + .... 
response specific data + */ + if (payload.length > 1) + self.emit('REQUEST_SUCCESS', payload.slice(1)); + else + self.emit('REQUEST_SUCCESS'); + } else if (type === MESSAGE.REQUEST_FAILURE) { + /* + byte SSH_MSG_REQUEST_FAILURE + */ + self.emit('REQUEST_FAILURE'); + } else if (type === MESSAGE.UNIMPLEMENTED) { + /* + byte SSH_MSG_UNIMPLEMENTED + uint32 packet sequence number of rejected message + */ + // TODO + } else if (type === MESSAGE.KEXINIT) + return parse_KEXINIT(self, callback); + else if (type === MESSAGE.CHANNEL_REQUEST) + return parse_CHANNEL_REQUEST(self, callback); + else if (type >= 30 && type <= 49) // Key exchange method-specific messages + return parse_KEX(self, type, callback); + else if (type >= 60 && type <= 70) // User auth context-specific messages + return parse_USERAUTH(self, type, callback); + else { + // Unknown packet type + var unimpl = new Buffer(1 + 4); + unimpl[0] = MESSAGE.UNIMPLEMENTED; + unimpl.writeUInt32BE(seqno, 1, true); + send(self, unimpl); + } +} + +function parse_KEXINIT(self, callback) { + var instate = self._state.incoming; + var payload = instate.payload; + + /* + byte SSH_MSG_KEXINIT + byte[16] cookie (random bytes) + name-list kex_algorithms + name-list server_host_key_algorithms + name-list encryption_algorithms_client_to_server + name-list encryption_algorithms_server_to_client + name-list mac_algorithms_client_to_server + name-list mac_algorithms_server_to_client + name-list compression_algorithms_client_to_server + name-list compression_algorithms_server_to_client + name-list languages_client_to_server + name-list languages_server_to_client + boolean first_kex_packet_follows + uint32 0 (reserved for future extension) + */ + var init = { + algorithms: { + kex: undefined, + srvHostKey: undefined, + cs: { + encrypt: undefined, + mac: undefined, + compress: undefined + }, + sc: { + encrypt: undefined, + mac: undefined, + compress: undefined + } + }, + languages: { + cs: undefined, + sc: undefined + } + }; + var val; + 
+ val = readList(payload, 17, self, callback); + if (val === false) + return false; + init.algorithms.kex = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.algorithms.srvHostKey = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.algorithms.cs.encrypt = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.algorithms.sc.encrypt = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.algorithms.cs.mac = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.algorithms.sc.mac = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.algorithms.cs.compress = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.algorithms.sc.compress = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.languages.cs = val; + val = readList(payload, payload._pos, self, callback); + if (val === false) + return false; + init.languages.sc = val; + + var firstFollows = (payload._pos < payload.length + && payload[payload._pos] === 1); + + instate.kexinit = payload; + + self.emit('KEXINIT', init, firstFollows); +} + +function parse_KEX(self, type, callback) { + var state = self._state; + var instate = state.incoming; + var payload = instate.payload; + var pktType = (RE_GEX.test(state.kexdh) + ? 
DYNAMIC_KEXDH_MESSAGE[type] + : KEXDH_MESSAGE[type]); + + if (state.outgoing.status === OUT_READY + || instate.expectedPacket !== pktType) { + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, expected: ' + + instate.expectedPacket + + ' but got: ' + + pktType); + self.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR); + var err = new Error('Received unexpected packet'); + err.level = 'protocol'; + self.emit('error', err); + return false; + } + + if (RE_GEX.test(state.kexdh)) { + // Dynamic group exchange-related + + if (self.server) { + // TODO: Support group exchange server-side + self.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR); + var err = new Error('DH group exchange not supported by server'); + err.level = 'handshake'; + self.emit('error', err); + return false; + } else { + if (type === MESSAGE.KEXDH_GEX_GROUP) { + /* + byte SSH_MSG_KEX_DH_GEX_GROUP + mpint p, safe prime + mpint g, generator for subgroup in GF(p) + */ + var prime = readString(payload, 1, self, callback); + if (prime === false) + return false; + var gen = readString(payload, payload._pos, self, callback); + if (gen === false) + return false; + self.emit('KEXDH_GEX_GROUP', prime, gen); + } else if (type === MESSAGE.KEXDH_GEX_REPLY) + return parse_KEXDH_REPLY(self, callback); + } + } else { + // Static group or ECDH-related + + if (type === MESSAGE.KEXDH_INIT) { + /* + byte SSH_MSG_KEXDH_INIT + mpint e + */ + var e = readString(payload, 1, self, callback); + if (e === false) + return false; + + self.emit('KEXDH_INIT', e); + } else if (type === MESSAGE.KEXDH_REPLY) + return parse_KEXDH_REPLY(self, callback); + } +} + +function parse_KEXDH_REPLY(self, callback) { + var payload = self._state.incoming.payload; + /* + byte SSH_MSG_KEXDH_REPLY + / SSH_MSG_KEX_DH_GEX_REPLY + / SSH_MSG_KEX_ECDH_REPLY + string server public host key and certificates (K_S) + mpint f + string signature of H + */ + var hostkey = readString(payload, 1, self, callback); + if (hostkey === false) + return false; + var pubkey = 
readString(payload, payload._pos, self, callback); + if (pubkey === false) + return false; + var sig = readString(payload, payload._pos, self, callback); + if (sig === false) + return false; + var info = { + hostkey: hostkey, + hostkey_format: undefined, + pubkey: pubkey, + sig: sig, + sig_format: undefined + }; + var hostkey_format = readString(hostkey, 0, 'ascii', self, callback); + if (hostkey_format === false) + return false; + info.hostkey_format = hostkey_format; + var sig_format = readString(sig, 0, 'ascii', self, callback); + if (sig_format === false) + return false; + info.sig_format = sig_format; + self.emit('KEXDH_REPLY', info); +} + +function parse_USERAUTH(self, type, callback) { + var state = self._state; + var authMethod = state.authsQueue[0]; + var payload = state.incoming.payload; + var message; + var lang; + var text; + + if (authMethod === 'password') { + if (type === MESSAGE.USERAUTH_PASSWD_CHANGEREQ) { + /* + byte SSH_MSG_USERAUTH_PASSWD_CHANGEREQ + string prompt in ISO-10646 UTF-8 encoding + string language tag + */ + message = readString(payload, 1, 'utf8', self, callback); + if (message === false) + return false; + lang = readString(payload, payload._pos, 'utf8', self, callback); + if (lang === false) + return false; + self.emit('USERAUTH_PASSWD_CHANGEREQ', message, lang); + } + } else if (authMethod === 'keyboard-interactive') { + if (type === MESSAGE.USERAUTH_INFO_REQUEST) { + /* + byte SSH_MSG_USERAUTH_INFO_REQUEST + string name (ISO-10646 UTF-8) + string instruction (ISO-10646 UTF-8) + string language tag -- MAY be empty + int num-prompts + string prompt[1] (ISO-10646 UTF-8) + boolean echo[1] + ... 
+ string prompt[num-prompts] (ISO-10646 UTF-8) + boolean echo[num-prompts] + */ + var name; + var instr; + var nprompts; + + name = readString(payload, 1, 'utf8', self, callback); + if (name === false) + return false; + instr = readString(payload, payload._pos, 'utf8', self, callback); + if (instr === false) + return false; + lang = readString(payload, payload._pos, 'utf8', self, callback); + if (lang === false) + return false; + nprompts = readInt(payload, payload._pos, self, callback); + if (nprompts === false) + return false; + + payload._pos += 4; + + var prompts = []; + for (var prompt = 0; prompt < nprompts; ++prompt) { + text = readString(payload, payload._pos, 'utf8', self, callback); + if (text === false) + return false; + var echo = payload[payload._pos++]; + if (echo === undefined) + return false; + echo = (echo !== 0); + prompts.push({ + prompt: text, + echo: echo + }); + } + self.emit('USERAUTH_INFO_REQUEST', name, instr, lang, prompts); + } else if (type === MESSAGE.USERAUTH_INFO_RESPONSE) { + /* + byte SSH_MSG_USERAUTH_INFO_RESPONSE + int num-responses + string response[1] (ISO-10646 UTF-8) + ... + string response[num-responses] (ISO-10646 UTF-8) + */ + var nresponses = readInt(payload, 1, self, callback); + if (nresponses === false) + return false; + + payload._pos = 5; + + var responses = []; + for (var response = 0; response < nresponses; ++response) { + text = readString(payload, payload._pos, 'utf8', self, callback); + if (text === false) + return false; + responses.push(text); + } + self.emit('USERAUTH_INFO_RESPONSE', responses); + } + } else if (authMethod === 'publickey') { + if (type === MESSAGE.USERAUTH_PK_OK) { + /* + byte SSH_MSG_USERAUTH_PK_OK + string public key algorithm name from the request + string public key blob from the request + */ + var authsQueue = self._state.authsQueue; + if (!authsQueue.length || authsQueue[0] !== 'publickey') + return; + authsQueue.shift(); + self.emit('USERAUTH_PK_OK'); + // XXX: Parse public key info? 
client currently can ignore it because + // there is only one outstanding auth request at any given time, so it + // knows which key was OK'd + } + } else if (authMethod !== undefined) { + // Invalid packet for this auth type + self.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR); + var err = new Error('Invalid authentication method: ' + authMethod); + err.level = 'protocol'; + self.emit('error', err); + } +} + +function parse_CHANNEL_REQUEST(self, callback) { + var payload = self._state.incoming.payload; + var info; + var cols; + var rows; + var width; + var height; + var wantReply; + var signal; + + var recipient = readInt(payload, 1, self, callback); + if (recipient === false) + return false; + var request = readString(payload, 5, 'ascii', self, callback); + if (request === false) + return false; + + if (request === 'exit-status') { // Server->Client + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "exit-status" + boolean FALSE + uint32 exit_status + */ + var code = readInt(payload, ++payload._pos, self, callback); + if (code === false) + return false; + info = { + recipient: recipient, + request: request, + wantReply: false, + code: code + }; + } else if (request === 'exit-signal') { // Server->Client + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "exit-signal" + boolean FALSE + string signal name (without the "SIG" prefix) + boolean core dumped + string error message in ISO-10646 UTF-8 encoding + string language tag + */ + var coredump; + if (!(self.remoteBugs & BUGS.OLD_EXIT)) { + signal = readString(payload, ++payload._pos, 'ascii', self, callback); + if (signal === false) + return false; + coredump = payload[payload._pos++]; + if (coredump === undefined) + return false; + coredump = (coredump !== 0); + } else { + /* + Instead of `signal name` and `core dumped`, we have just: + + uint32 signal number + */ + signal = readInt(payload, ++payload._pos, self, callback); + if (signal === false) + return false; + switch 
(signal) { + case 1: + signal = 'HUP'; + break; + case 2: + signal = 'INT'; + break; + case 3: + signal = 'QUIT'; + break; + case 6: + signal = 'ABRT'; + break; + case 9: + signal = 'KILL'; + break; + case 14: + signal = 'ALRM'; + break; + case 15: + signal = 'TERM'; + break; + default: + // Unknown or OS-specific + signal = 'UNKNOWN (' + signal + ')'; + } + coredump = false; + } + var description = readString(payload, payload._pos, 'utf8', self, + callback); + if (description === false) + return false; + var lang = readString(payload, payload._pos, 'utf8', self, callback); + if (lang === false) + return false; + info = { + recipient: recipient, + request: request, + wantReply: false, + signal: signal, + coredump: coredump, + description: description, + lang: lang + }; + } else if (request === 'pty-req') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "pty-req" + boolean want_reply + string TERM environment variable value (e.g., vt100) + uint32 terminal width, characters (e.g., 80) + uint32 terminal height, rows (e.g., 24) + uint32 terminal width, pixels (e.g., 640) + uint32 terminal height, pixels (e.g., 480) + string encoded terminal modes + */ + wantReply = payload[payload._pos++]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + var term = readString(payload, payload._pos, 'ascii', self, callback); + if (term === false) + return false; + cols = readInt(payload, payload._pos, self, callback); + if (cols === false) + return false; + rows = readInt(payload, payload._pos += 4, self, callback); + if (rows === false) + return false; + width = readInt(payload, payload._pos += 4, self, callback); + if (width === false) + return false; + height = readInt(payload, payload._pos += 4, self, callback); + if (height === false) + return false; + var modes = readString(payload, payload._pos += 4, self, callback); + if (modes === false) + return false; + modes = bytesToModes(modes); + info = { + 
recipient: recipient, + request: request, + wantReply: wantReply, + term: term, + cols: cols, + rows: rows, + width: width, + height: height, + modes: modes + }; + } else if (request === 'window-change') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "window-change" + boolean FALSE + uint32 terminal width, columns + uint32 terminal height, rows + uint32 terminal width, pixels + uint32 terminal height, pixels + */ + cols = readInt(payload, ++payload._pos, self, callback); + if (cols === false) + return false; + rows = readInt(payload, payload._pos += 4, self, callback); + if (rows === false) + return false; + width = readInt(payload, payload._pos += 4, self, callback); + if (width === false) + return false; + height = readInt(payload, payload._pos += 4, self, callback); + if (height === false) + return false; + info = { + recipient: recipient, + request: request, + wantReply: false, + cols: cols, + rows: rows, + width: width, + height: height + }; + } else if (request === 'x11-req') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "x11-req" + boolean want reply + boolean single connection + string x11 authentication protocol + string x11 authentication cookie + uint32 x11 screen number + */ + wantReply = payload[payload._pos++]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + var single = payload[payload._pos++]; + if (single === undefined) + return false; + single = (single !== 0); + var protocol = readString(payload, payload._pos, 'ascii', self, callback); + if (protocol === false) + return false; + var cookie = readString(payload, payload._pos, 'hex', self, callback); + if (cookie === false) + return false; + var screen = readInt(payload, payload._pos, self, callback); + if (screen === false) + return false; + info = { + recipient: recipient, + request: request, + wantReply: wantReply, + single: single, + protocol: protocol, + cookie: cookie, 
+ screen: screen + }; + } else if (request === 'env') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "env" + boolean want reply + string variable name + string variable value + */ + wantReply = payload[payload._pos++]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + var key = readString(payload, payload._pos, 'utf8', self, callback); + if (key === false) + return false; + var val = readString(payload, payload._pos, 'utf8', self, callback); + if (val === false) + return false; + info = { + recipient: recipient, + request: request, + wantReply: wantReply, + key: key, + val: val + }; + } else if (request === 'shell') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "shell" + boolean want reply + */ + wantReply = payload[payload._pos]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + info = { + recipient: recipient, + request: request, + wantReply: wantReply + }; + } else if (request === 'exec') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "exec" + boolean want reply + string command + */ + wantReply = payload[payload._pos++]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + var command = readString(payload, payload._pos, 'utf8', self, callback); + if (command === false) + return false; + info = { + recipient: recipient, + request: request, + wantReply: wantReply, + command: command + }; + } else if (request === 'subsystem') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "subsystem" + boolean want reply + string subsystem name + */ + wantReply = payload[payload._pos++]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + var subsystem = readString(payload, payload._pos, 'utf8', self, callback); + if (subsystem === false) + return false; + info = { + recipient: 
recipient, + request: request, + wantReply: wantReply, + subsystem: subsystem + }; + } else if (request === 'signal') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "signal" + boolean FALSE + string signal name (without the "SIG" prefix) + */ + signal = readString(payload, ++payload._pos, 'ascii', self, callback); + if (signal === false) + return false; + info = { + recipient: recipient, + request: request, + wantReply: false, + signal: 'SIG' + signal + }; + } else if (request === 'xon-xoff') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "xon-xoff" + boolean FALSE + boolean client can do + */ + var clientControl = payload[++payload._pos]; + if (clientControl === undefined) + return false; + clientControl = (clientControl !== 0); + info = { + recipient: recipient, + request: request, + wantReply: false, + clientControl: clientControl + }; + } else if (request === 'auth-agent-req@openssh.com') { // Client->Server + /* + byte SSH_MSG_CHANNEL_REQUEST + uint32 recipient channel + string "auth-agent-req@openssh.com" + boolean want reply + */ + wantReply = payload[payload._pos]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + info = { + recipient: recipient, + request: request, + wantReply: wantReply + }; + } else { + // Unknown request type + wantReply = payload[payload._pos]; + if (wantReply === undefined) + return false; + wantReply = (wantReply !== 0); + info = { + recipient: recipient, + request: request, + wantReply: wantReply + }; + } + self.debug('DEBUG: Parser: IN_PACKETDATAAFTER, packet: CHANNEL_REQUEST (' + + recipient + + ', ' + + request + + ')'); + self.emit('CHANNEL_REQUEST:' + recipient, info); +} + +function hmacVerify(self, data) { + var instate = self._state.incoming; + var hmac = instate.hmac; + + self.debug('DEBUG: Parser: Verifying MAC'); + + if (instate.decrypt.isGCM) { + var decrypt = instate.decrypt; + var instance = 
decrypt.instance; + + instance.setAuthTag(data); + + var payload = instance.update(instate.packet); + instate.payload = payload.slice(1, instate.packet.length + 4 - payload[0]); + //instance.final(); + iv_inc(decrypt.iv); + + decrypt.instance = crypto.createDecipheriv( + SSH_TO_OPENSSL[decrypt.type], + decrypt.key, + decrypt.iv + ); + decrypt.instance.setAutoPadding(false); + return true; + } else { + var calcHmac = crypto.createHmac(SSH_TO_OPENSSL[hmac.type], hmac.key); + + hmac.bufCompute.writeUInt32BE(instate.seqno, 0, true); + hmac.bufCompute.writeUInt32BE(instate.pktLen, 4, true); + hmac.bufCompute[8] = instate.padLen; + + calcHmac.update(hmac.bufCompute); + calcHmac.update(instate.packet); + + var mac = calcHmac.digest('binary'); + if (mac.length > instate.hmac.size) + mac = mac.slice(0, instate.hmac.size); + return (mac === data.toString('binary')); + } +} + +function decryptData(self, data) { + var instance = self._state.incoming.decrypt.instance; + self.debug('DEBUG: Parser: Decrypting'); + return instance.update(data); +} + +function expectData(self, type, amount, bufferKey) { + var expect = self._state.incoming.expect; + expect.amount = amount; + expect.type = type; + expect.ptr = 0; + if (bufferKey && self[bufferKey]) + expect.buf = self[bufferKey]; + else if (amount) + expect.buf = new Buffer(amount); +} + +function readList(buffer, start, stream, callback) { + var list = readString(buffer, start, 'ascii', stream, callback); + return (list !== false ? (list.length ? 
list.split(',') : []) : false); +} + +function bytesToModes(buffer) { + var modes = {}; + + for (var i = 0, len = buffer.length, opcode; i < len; i += 5) { + opcode = buffer[i]; + if (opcode === TERMINAL_MODE.TTY_OP_END + || TERMINAL_MODE[opcode] === undefined + || i + 5 > len) + break; + modes[TERMINAL_MODE[opcode]] = buffer.readUInt32BE(i + 1, true); + } + + return modes; +} + +function modesToBytes(modes) { + var RE_IS_NUM = /^\d+$/; + var keys = Object.keys(modes); + var b = 0; + var bytes = []; + + for (var i = 0, len = keys.length, key, opcode, val; i < len; ++i) { + key = keys[i]; + opcode = TERMINAL_MODE[key]; + if (opcode + && !RE_IS_NUM.test(key) + && typeof modes[key] === 'number' + && key !== 'TTY_OP_END') { + val = modes[key]; + bytes[b++] = opcode; + bytes[b++] = (val >>> 24) & 0xFF; + bytes[b++] = (val >>> 16) & 0xFF; + bytes[b++] = (val >>> 8) & 0xFF; + bytes[b++] = val & 0xFF; + } + } + + bytes[b] = TERMINAL_MODE.TTY_OP_END; + + return bytes; +} + +// Shared outgoing functions +function KEXINIT(self, cb) { // Client/Server + randBytes(16, function(myCookie) { + /* + byte SSH_MSG_KEXINIT + byte[16] cookie (random bytes) + name-list kex_algorithms + name-list server_host_key_algorithms + name-list encryption_algorithms_client_to_server + name-list encryption_algorithms_server_to_client + name-list mac_algorithms_client_to_server + name-list mac_algorithms_server_to_client + name-list compression_algorithms_client_to_server + name-list compression_algorithms_server_to_client + name-list languages_client_to_server + name-list languages_server_to_client + boolean first_kex_packet_follows + uint32 0 (reserved for future extension) + */ + var algos = self.config.algorithms; + + var kexBuf = algos.kexBuf; + if (self.remoteBugs & BUGS.BAD_DHGEX) { + var copied = false; + var kexList = algos.kex; + for (var j = kexList.length - 1; j >= 0; --j) { + if (kexList[j].indexOf('group-exchange') !== -1) { + if (!copied) { + kexList = kexList.slice(); + copied = 
true; + } + kexList.splice(j, 1); + } + } + if (copied) + kexBuf = new Buffer(kexList.join(',')); + } + + var hostKeyBuf = algos.serverHostKeyBuf; + + var kexInitSize = 1 + 16 + + 4 + kexBuf.length + + 4 + hostKeyBuf.length + + (2 * (4 + algos.cipherBuf.length)) + + (2 * (4 + algos.hmacBuf.length)) + + (2 * (4 + algos.compressBuf.length)) + + (2 * (4 /* languages skipped */)) + + 1 + 4; + var buf = new Buffer(kexInitSize); + var p = 17; + + buf.fill(0); + + buf[0] = MESSAGE.KEXINIT; + + if (myCookie !== false) + myCookie.copy(buf, 1); + + buf.writeUInt32BE(kexBuf.length, p, true); + p += 4; + kexBuf.copy(buf, p); + p += kexBuf.length; + + buf.writeUInt32BE(hostKeyBuf.length, p, true); + p += 4; + hostKeyBuf.copy(buf, p); + p += hostKeyBuf.length; + + buf.writeUInt32BE(algos.cipherBuf.length, p, true); + p += 4; + algos.cipherBuf.copy(buf, p); + p += algos.cipherBuf.length; + + buf.writeUInt32BE(algos.cipherBuf.length, p, true); + p += 4; + algos.cipherBuf.copy(buf, p); + p += algos.cipherBuf.length; + + buf.writeUInt32BE(algos.hmacBuf.length, p, true); + p += 4; + algos.hmacBuf.copy(buf, p); + p += algos.hmacBuf.length; + + buf.writeUInt32BE(algos.hmacBuf.length, p, true); + p += 4; + algos.hmacBuf.copy(buf, p); + p += algos.hmacBuf.length; + + buf.writeUInt32BE(algos.compressBuf.length, p, true); + p += 4; + algos.compressBuf.copy(buf, p); + p += algos.compressBuf.length; + + buf.writeUInt32BE(algos.compressBuf.length, p, true); + p += 4; + algos.compressBuf.copy(buf, p); + p += algos.compressBuf.length; + + // Skip language lists, first_kex_packet_follows, and reserved bytes + + self.debug('DEBUG: Outgoing: Writing KEXINIT'); + + self._state.incoming.expectedPacket = 'KEXINIT'; + + var outstate = self._state.outgoing; + + outstate.kexinit = buf; + + if (outstate.status === OUT_READY) { + // We are the one starting the rekeying process ... 
+ outstate.status = OUT_REKEYING; + } + + send(self, buf, cb, true); + }); + return true; +} + +function KEXDH_INIT(self) { // Client + var state = self._state; + var outstate = state.outgoing; + var buf = new Buffer(1 + 4 + outstate.pubkey.length); + + if (RE_GEX.test(state.kexdh)) { + state.incoming.expectedPacket = 'KEXDH_GEX_REPLY'; + buf[0] = MESSAGE.KEXDH_GEX_INIT; + self.debug('DEBUG: Outgoing: Writing KEXDH_GEX_INIT'); + } else { + state.incoming.expectedPacket = 'KEXDH_REPLY'; + buf[0] = MESSAGE.KEXDH_INIT; + if (state.kexdh !== 'group') + self.debug('DEBUG: Outgoing: Writing KEXECDH_INIT'); + else + self.debug('DEBUG: Outgoing: Writing KEXDH_INIT'); + } + + buf.writeUInt32BE(outstate.pubkey.length, 1, true); + outstate.pubkey.copy(buf, 5); + + return send(self, buf, undefined, true); +} + +function KEXDH_REPLY(self, e) { // Server + var state = self._state; + var outstate = state.outgoing; + var instate = state.incoming; + var curHostKey = self.config.hostKeys[state.hostkeyFormat]; + var hostkey = curHostKey.publicKey.public; + var hostkeyAlgo = curHostKey.publicKey.fulltype; + var privateKey = curHostKey.privateKey.privateOrig; + + // e === client DH public key + + var slicepos = -1; + for (var i = 0, len = e.length; i < len; ++i) { + if (e[i] === 0) + ++slicepos; + else + break; + } + if (slicepos > -1) + e = e.slice(slicepos + 1); + + var secret = tryComputeSecret(state.kex, e); + if (secret instanceof Error) { + secret.message = 'Error while computing DH secret (' + + state.kexdh + '): ' + + secret.message; + secret.level = 'handshake'; + self.emit('error', secret); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + var hashAlgo; + if (state.kexdh === 'group') + hashAlgo = 'sha1'; + else + hashAlgo = RE_KEX_HASH.exec(state.kexdh)[1]; + + var hash = crypto.createHash(hashAlgo); + + var len_ident = Buffer.byteLength(instate.identRaw); + var len_sident = Buffer.byteLength(self.config.ident); + var len_init = 
instate.kexinit.length; + var len_sinit = outstate.kexinit.length; + var len_hostkey = hostkey.length; + var len_pubkey = e.length; + var len_spubkey = outstate.pubkey.length; + var len_secret = secret.length; + + var idx_spubkey = 0; + var idx_secret = 0; + + while (outstate.pubkey[idx_spubkey] === 0x00) { + ++idx_spubkey; + --len_spubkey; + } + while (secret[idx_secret] === 0x00) { + ++idx_secret; + --len_secret; + } + if (e[0] & 0x80) + ++len_pubkey; + if (outstate.pubkey[idx_spubkey] & 0x80) + ++len_spubkey; + if (secret[idx_secret] & 0x80) + ++len_secret; + + var exchangeBufLen = len_ident + + len_sident + + len_init + + len_sinit + + len_hostkey + + len_pubkey + + len_spubkey + + len_secret + + (4 * 8); // Length fields for above values + + // Group exchange-related + var isGEX = RE_GEX.test(state.kexdh); + var len_gex_prime = 0; + var len_gex_gen = 0; + var idx_gex_prime = 0; + var idx_gex_gen = 0; + var gex_prime; + var gex_gen; + if (isGEX) { + gex_prime = state.kex.getPrime(); + gex_gen = state.kex.getGenerator(); + len_gex_prime = gex_prime.length; + len_gex_gen = gex_gen.length; + while (gex_prime[idx_gex_prime] === 0x00) { + ++idx_gex_prime; + --len_gex_prime; + } + while (gex_gen[idx_gex_gen] === 0x00) { + ++idx_gex_gen; + --len_gex_gen; + } + if (gex_prime[idx_gex_prime] & 0x80) + ++len_gex_prime; + if (gex_gen[idx_gex_gen] & 0x80) + ++len_gex_gen; + exchangeBufLen += (4 * 3); // min, n, max values + exchangeBufLen += (4 * 2); // prime, generator length fields + exchangeBufLen += len_gex_prime; + exchangeBufLen += len_gex_gen; + } + + var bp = 0; + var exchangeBuf = new Buffer(exchangeBufLen); + + exchangeBuf.writeUInt32BE(len_ident, bp, true); + bp += 4; + exchangeBuf.write(instate.identRaw, bp, 'utf8'); // V_C + bp += len_ident; + + exchangeBuf.writeUInt32BE(len_sident, bp, true); + bp += 4; + exchangeBuf.write(self.config.ident, bp, 'utf8'); // V_S + bp += len_sident; + + exchangeBuf.writeUInt32BE(len_init, bp, true); + bp += 4; + 
instate.kexinit.copy(exchangeBuf, bp); // I_C + bp += len_init; + instate.kexinit = undefined; + + exchangeBuf.writeUInt32BE(len_sinit, bp, true); + bp += 4; + outstate.kexinit.copy(exchangeBuf, bp); // I_S + bp += len_sinit; + outstate.kexinit = undefined; + + exchangeBuf.writeUInt32BE(len_hostkey, bp, true); + bp += 4; + hostkey.copy(exchangeBuf, bp); // K_S + bp += len_hostkey; + + if (isGEX) { + KEXDH_GEX_REQ_PACKET.slice(1).copy(exchangeBuf, bp); // min, n, max + bp += (4 * 3); // Skip over bytes just copied + + exchangeBuf.writeUInt32BE(len_gex_prime, bp, true); + bp += 4; + if (gex_prime[idx_gex_prime] & 0x80) + exchangeBuf[bp++] = 0; + gex_prime.copy(exchangeBuf, bp, idx_gex_prime); // p + bp += len_gex_prime - (gex_prime[idx_gex_prime] & 0x80 ? 1 : 0); + + exchangeBuf.writeUInt32BE(len_gex_gen, bp, true); + bp += 4; + if (gex_gen[idx_gex_gen] & 0x80) + exchangeBuf[bp++] = 0; + gex_gen.copy(exchangeBuf, bp, idx_gex_gen); // g + bp += len_gex_gen - (gex_gen[idx_gex_gen] & 0x80 ? 1 : 0); + } + + exchangeBuf.writeUInt32BE(len_pubkey, bp, true); + bp += 4; + if (e[0] & 0x80) + exchangeBuf[bp++] = 0; + e.copy(exchangeBuf, bp); // e + bp += len_pubkey - (e[0] & 0x80 ? 1 : 0); + + exchangeBuf.writeUInt32BE(len_spubkey, bp, true); + bp += 4; + if (outstate.pubkey[idx_spubkey] & 0x80) + exchangeBuf[bp++] = 0; + outstate.pubkey.copy(exchangeBuf, bp, idx_spubkey); // f + bp += len_spubkey - (outstate.pubkey[idx_spubkey] & 0x80 ? 
1 : 0); + + exchangeBuf.writeUInt32BE(len_secret, bp, true); + bp += 4; + if (secret[idx_secret] & 0x80) + exchangeBuf[bp++] = 0; + secret.copy(exchangeBuf, bp, idx_secret); // K + + outstate.exchangeHash = hash.update(exchangeBuf).digest(); // H + + if (outstate.sessionId === undefined) + outstate.sessionId = outstate.exchangeHash; + outstate.kexsecret = secret; + + var signAlgo; + switch (hostkeyAlgo) { + case 'ssh-rsa': + signAlgo = 'RSA-SHA1'; + break; + case 'ssh-dss': + signAlgo = 'DSA-SHA1'; + break; + case 'ecdsa-sha2-nistp256': + signAlgo = 'sha256'; + break; + case 'ecdsa-sha2-nistp384': + signAlgo = 'sha384'; + break; + case 'ecdsa-sha2-nistp521': + signAlgo = 'sha512'; + break; + } + var signer = crypto.createSign(signAlgo); + var signature; + signer.update(outstate.exchangeHash); + signature = trySign(signer, privateKey); + if (signature instanceof Error) { + signature.message = 'Error while signing data with host key (' + + hostkeyAlgo + '): ' + + signature.message; + signature.level = 'handshake'; + self.emit('error', signature); + self.disconnect(DISCONNECT_REASON.KEY_EXCHANGE_FAILED); + return false; + } + + if (signAlgo === 'DSA-SHA1') { + signature = DSASigBERToBare(signature); + } else if (signAlgo !== 'RSA-SHA1') { + // ECDSA + signature = ECDSASigASN1ToSSH(signature); + } + + /* + byte SSH_MSG_KEXDH_REPLY + string server public host key and certificates (K_S) + mpint f + string signature of H + */ + + var siglen = 4 + hostkeyAlgo.length + 4 + signature.length; + var buf = new Buffer(1 + + 4 + len_hostkey + + 4 + len_spubkey + + 4 + siglen); + + bp = 0; + buf[bp] = (!isGEX ? 
MESSAGE.KEXDH_REPLY : MESSAGE.KEXDH_GEX_REPLY); + ++bp; + + buf.writeUInt32BE(len_hostkey, bp, true); + bp += 4; + hostkey.copy(buf, bp); // K_S + bp += len_hostkey; + + buf.writeUInt32BE(len_spubkey, bp, true); + bp += 4; + if (outstate.pubkey[idx_spubkey] & 0x80) + buf[bp++] = 0; + outstate.pubkey.copy(buf, bp, idx_spubkey); // f + bp += len_spubkey - (outstate.pubkey[idx_spubkey] & 0x80 ? 1 : 0); + + buf.writeUInt32BE(siglen, bp, true); + bp += 4; + buf.writeUInt32BE(hostkeyAlgo.length, bp, true); + bp += 4; + buf.write(hostkeyAlgo, bp, hostkeyAlgo.length, 'ascii'); + bp += hostkeyAlgo.length; + buf.writeUInt32BE(signature.length, bp, true); + bp += 4; + signature.copy(buf, bp); + + state.incoming.expectedPacket = 'NEWKEYS'; + + if (isGEX) + self.debug('DEBUG: Outgoing: Writing KEXDH_GEX_REPLY'); + else if (state.kexdh !== 'group') + self.debug('DEBUG: Outgoing: Writing KEXECDH_REPLY'); + else + self.debug('DEBUG: Outgoing: Writing KEXDH_REPLY'); + send(self, buf, undefined, true); + + outstate.sentNEWKEYS = true; + self.debug('DEBUG: Outgoing: Writing NEWKEYS'); + return send(self, NEWKEYS_PACKET, undefined, true); +} + +function KEXDH_GEX_REQ(self) { // Client + self._state.incoming.expectedPacket = 'KEXDH_GEX_GROUP'; + + self.debug('DEBUG: Outgoing: Writing KEXDH_GEX_REQUEST'); + return send(self, KEXDH_GEX_REQ_PACKET, undefined, true); +} + +function send(self, payload, cb, bypass) { + var state = self._state; + + if (!state) + return false; + + var outstate = state.outgoing; + if (outstate.status === OUT_REKEYING && !bypass) { + if (typeof cb === 'function') + outstate.rekeyQueue.push([payload, cb]); + else + outstate.rekeyQueue.push(payload); + return false; + } else if (self._readableState.ended || self._writableState.ended) + return false; + + var compress = outstate.compress.instance; + if (compress) { + compress.write(payload); + compress.flush(Z_PARTIAL_FLUSH, function() { + if (self._readableState.ended || self._writableState.ended) + return; + 
send_(self, compress.read(), cb); + }); + return true; + } else + return send_(self, payload, cb); +} + +function send_(self, payload, cb) { + // TODO: Implement length checks + + var state = self._state; + var outstate = state.outgoing; + var encrypt = outstate.encrypt; + var hmac = outstate.hmac; + var pktLen; + var padLen; + var buf; + var mac; + var ret; + + pktLen = payload.length + 9; + + if (encrypt.instance !== false && encrypt.isGCM) { + var ptlen = 1 + payload.length + 4 /* Must have at least 4 bytes padding*/; + while ((ptlen % encrypt.size) !== 0) + ++ptlen; + padLen = ptlen - 1 - payload.length; + pktLen = 4 + ptlen; + } else { + pktLen += ((encrypt.size - 1) * pktLen) % encrypt.size; + padLen = pktLen - payload.length - 5; + } + + buf = new Buffer(pktLen); + + buf.writeUInt32BE(pktLen - 4, 0, true); + buf[4] = padLen; + payload.copy(buf, 5); + + var padBytes = crypto.randomBytes(padLen); + padBytes.copy(buf, 5 + payload.length); + + if (hmac.type !== false && hmac.key) { + mac = crypto.createHmac(SSH_TO_OPENSSL[hmac.type], hmac.key); + outstate.bufSeqno.writeUInt32BE(outstate.seqno, 0, true); + mac.update(outstate.bufSeqno); + mac.update(buf); + mac = mac.digest(); + if (mac.length > outstate.hmac.size) + mac = mac.slice(0, outstate.hmac.size); + } + + var nb = 0; + var encData; + + if (encrypt.instance !== false) { + if (encrypt.isGCM) { + var encrypter = crypto.createCipheriv(SSH_TO_OPENSSL[encrypt.type], + encrypt.key, + encrypt.iv); + encrypter.setAutoPadding(false); + + var lenbuf = buf.slice(0, 4); + + encrypter.setAAD(lenbuf); + self.push(lenbuf); + nb += lenbuf; + + encData = encrypter.update(buf.slice(4)); + self.push(encData); + nb += encData.length; + + var final = encrypter.final(); + if (final.length) { + self.push(final); + nb += final.length; + } + + var authTag = encrypter.getAuthTag(); + ret = self.push(authTag); + nb += authTag.length; + + iv_inc(encrypt.iv); + } else { + encData = encrypt.instance.update(buf); + self.push(encData); 
+ nb += encData.length; + + ret = self.push(mac); + nb += mac.length; + } + } else { + ret = self.push(buf); + nb = buf.length; + } + + self.bytesSent += nb; + + if (++outstate.seqno > MAX_SEQNO) + outstate.seqno = 0; + + cb && cb(); + + return ret; +} + +function randBytes(n, cb) { + crypto.randomBytes(n, function retry(err, buf) { + if (err) + return crypto.randomBytes(n, retry); + cb && cb(buf); + }); +} + +function trySign(sig, key) { + try { + return sig.sign(key); + } catch (err) { + return err; + } +} + +function tryComputeSecret(dh, e) { + try { + return dh.computeSecret(e); + } catch (err) { + return err; + } +} + +module.exports = SSH2Stream; +module.exports._send = send; diff --git a/reverse_engineering/node_modules/ssh2-streams/lib/utils.js b/reverse_engineering/node_modules/ssh2-streams/lib/utils.js new file mode 100644 index 0000000..fdb945e --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/lib/utils.js @@ -0,0 +1,817 @@ +var crypto = require('crypto'); + +var Ber = require('asn1').Ber; +var BigInteger = require('./jsbn'); // only for converting PPK -> OpenSSL format + +var SSH_TO_OPENSSL = require('./constants').SSH_TO_OPENSSL; + +var RE_STREAM = /^arcfour/i; +var RE_KEY_LEN = /(.{64})/g; +// XXX the value of 2400 from dropbear is only for certain strings, not all +// strings. 
for example the list strings used during handshakes +var MAX_STRING_LEN = Infinity;//2400; // taken from dropbear +var PPK_IV = new Buffer([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]); + +module.exports = { + iv_inc: iv_inc, + isStreamCipher: isStreamCipher, + readInt: readInt, + readString: readString, + parseKey: require('./keyParser'), + genPublicKey: genPublicKey, + convertPPKPrivate: convertPPKPrivate, + verifyPPKMAC: verifyPPKMAC, + decryptKey: decryptKey, + DSASigBERToBare: DSASigBERToBare, + DSASigBareToBER: DSASigBareToBER, + ECDSASigASN1ToSSH: ECDSASigASN1ToSSH, + ECDSASigSSHToASN1: ECDSASigSSHToASN1, + RSAKeySSHToASN1: RSAKeySSHToASN1, + DSAKeySSHToASN1: DSAKeySSHToASN1, + ECDSAKeySSHToASN1: ECDSAKeySSHToASN1 +}; + +function iv_inc(iv) { + var n = 12; + var c = 0; + do { + --n; + c = iv[n]; + if (c === 255) + iv[n] = 0; + else { + iv[n] = ++c; + return; + } + } while (n > 4); +} + +function isStreamCipher(name) { + return RE_STREAM.test(name); +} + +function readInt(buffer, start, stream, cb) { + var bufferLen = buffer.length; + if (start < 0 || start >= bufferLen || (bufferLen - start) < 4) { + stream && stream._cleanup(cb); + return false; + } + + return buffer.readUInt32BE(start, true); +} + +function DSASigBERToBare(signature) { + if (signature.length <= 40) + return signature; + // This is a quick and dirty way to get from BER encoded r and s that + // OpenSSL gives us, to just the bare values back to back (40 bytes + // total) like OpenSSH (and possibly others) are expecting + var asnReader = new Ber.Reader(signature); + asnReader.readSequence(); + var r = asnReader.readString(Ber.Integer, true); + var s = asnReader.readString(Ber.Integer, true); + var rOffset = 0; + var sOffset = 0; + if (r.length < 20) { + var rNew = new Buffer(20); + r.copy(rNew, 1); + r = rNew; + r[0] = 0; + } + if (s.length < 20) { + var sNew = new Buffer(20); + s.copy(sNew, 1); + s = sNew; + s[0] = 0; + } + if (r.length > 20 && r[0] === 0x00) + rOffset = 1; + if 
(s.length > 20 && s[0] === 0x00) + sOffset = 1; + var newSig = new Buffer((r.length - rOffset) + (s.length - sOffset)); + r.copy(newSig, 0, rOffset); + s.copy(newSig, r.length - rOffset, sOffset); + return newSig; +} + +function DSASigBareToBER(signature) { + if (signature.length > 40) + return signature; + // Change bare signature r and s values to ASN.1 BER values for OpenSSL + var asnWriter = new Ber.Writer(); + asnWriter.startSequence(); + var r = signature.slice(0, 20); + var s = signature.slice(20); + if (r[0] & 0x80) { + var rNew = new Buffer(21); + rNew[0] = 0x00; + r.copy(rNew, 1); + r = rNew; + } else if (r[0] === 0x00 && !(r[1] & 0x80)) { + r = r.slice(1); + } + if (s[0] & 0x80) { + var sNew = new Buffer(21); + sNew[0] = 0x00; + s.copy(sNew, 1); + s = sNew; + } else if (s[0] === 0x00 && !(s[1] & 0x80)) { + s = s.slice(1); + } + asnWriter.writeBuffer(r, Ber.Integer); + asnWriter.writeBuffer(s, Ber.Integer); + asnWriter.endSequence(); + return asnWriter.buffer; +} + +function ECDSASigASN1ToSSH(signature) { + if (signature[0] === 0x00) + return signature; + // Convert SSH signature parameters to ASN.1 BER values for OpenSSL + var asnReader = new Ber.Reader(signature); + asnReader.readSequence(); + var r = asnReader.readString(Ber.Integer, true); + var s = asnReader.readString(Ber.Integer, true); + if (r === null || s === null) + throw new Error('Invalid signature'); + var newSig = new Buffer(4 + r.length + 4 + s.length); + newSig.writeUInt32BE(r.length, 0, true); + r.copy(newSig, 4); + newSig.writeUInt32BE(s.length, 4 + r.length, true); + s.copy(newSig, 4 + 4 + r.length); + return newSig; +} + +function ECDSASigSSHToASN1(signature, self, callback) { + // Convert SSH signature parameters to ASN.1 BER values for OpenSSL + var r = readString(signature, 0, self, callback); + if (r === false) + return false; + var s = readString(signature, signature._pos, self, callback); + if (s === false) + return false; + + var asnWriter = new Ber.Writer(); + 
asnWriter.startSequence(); + asnWriter.writeBuffer(r, Ber.Integer); + asnWriter.writeBuffer(s, Ber.Integer); + asnWriter.endSequence(); + return asnWriter.buffer; +} + +function RSAKeySSHToASN1(key, self, callback) { + // Convert SSH key parameters to ASN.1 BER values for OpenSSL + var e = readString(key, key._pos, self, callback); + if (e === false) + return false; + var n = readString(key, key._pos, self, callback); + if (n === false) + return false; + + var asnWriter = new Ber.Writer(); + asnWriter.startSequence(); + // algorithm + asnWriter.startSequence(); + asnWriter.writeOID('1.2.840.113549.1.1.1'); // rsaEncryption + // algorithm parameters (RSA has none) + asnWriter.writeNull(); + asnWriter.endSequence(); + + // subjectPublicKey + asnWriter.startSequence(Ber.BitString); + asnWriter.writeByte(0x00); + asnWriter.startSequence(); + asnWriter.writeBuffer(n, Ber.Integer); + asnWriter.writeBuffer(e, Ber.Integer); + asnWriter.endSequence(); + asnWriter.endSequence(); + asnWriter.endSequence(); + return asnWriter.buffer; +} + +function DSAKeySSHToASN1(key, self, callback) { + // Convert SSH key parameters to ASN.1 BER values for OpenSSL + var p = readString(key, key._pos, self, callback); + if (p === false) + return false; + var q = readString(key, key._pos, self, callback); + if (q === false) + return false; + var g = readString(key, key._pos, self, callback); + if (g === false) + return false; + var y = readString(key, key._pos, self, callback); + if (y === false) + return false; + + var asnWriter = new Ber.Writer(); + asnWriter.startSequence(); + // algorithm + asnWriter.startSequence(); + asnWriter.writeOID('1.2.840.10040.4.1'); // id-dsa + // algorithm parameters + asnWriter.startSequence(); + asnWriter.writeBuffer(p, Ber.Integer); + asnWriter.writeBuffer(q, Ber.Integer); + asnWriter.writeBuffer(g, Ber.Integer); + asnWriter.endSequence(); + asnWriter.endSequence(); + + // subjectPublicKey + asnWriter.startSequence(Ber.BitString); + asnWriter.writeByte(0x00); 
+ asnWriter.writeBuffer(y, Ber.Integer); + asnWriter.endSequence(); + asnWriter.endSequence(); + return asnWriter.buffer; +} + +function ECDSAKeySSHToASN1(key, self, callback) { + // Convert SSH key parameters to ASN.1 BER values for OpenSSL + var curve = readString(key, key._pos, self, callback); + if (curve === false) + return false; + var Q = readString(key, key._pos, self, callback); + if (Q === false) + return false; + + var ecCurveOID; + switch (curve.toString('ascii')) { + case 'nistp256': + // prime256v1/secp256r1 + ecCurveOID = '1.2.840.10045.3.1.7'; + break; + case 'nistp384': + // secp384r1 + ecCurveOID = '1.3.132.0.34'; + break; + case 'nistp521': + // secp521r1 + ecCurveOID = '1.3.132.0.35'; + break; + default: + return false; + } + var asnWriter = new Ber.Writer(); + asnWriter.startSequence(); + // algorithm + asnWriter.startSequence(); + asnWriter.writeOID('1.2.840.10045.2.1'); // id-ecPublicKey + // algorithm parameters (namedCurve) + asnWriter.writeOID(ecCurveOID); + asnWriter.endSequence(); + + // subjectPublicKey + asnWriter.startSequence(Ber.BitString); + asnWriter.writeByte(0x00); + // XXX: hack to write a raw buffer without a tag -- yuck + asnWriter._ensure(Q.length); + Q.copy(asnWriter._buf, asnWriter._offset, 0, Q.length); + asnWriter._offset += Q.length; + // end hack + asnWriter.endSequence(); + asnWriter.endSequence(); + return asnWriter.buffer; +} + +function decryptKey(keyInfo, passphrase) { + if (keyInfo._decrypted || !keyInfo.encryption) + return; + + var keylen = 0; + var key; + var iv; + var dc; + + keyInfo.encryption = (SSH_TO_OPENSSL[keyInfo.encryption] + || keyInfo.encryption); + switch (keyInfo.encryption) { + case 'aes-256-cbc': + case 'aes-256-ctr': + keylen = 32; + break; + case 'des-ede3-cbc': + case 'des-ede3': + case 'aes-192-cbc': + case 'aes-192-ctr': + keylen = 24; + break; + case 'aes-128-cbc': + case 'aes-128-ctr': + case 'cast-cbc': + case 'bf-cbc': + keylen = 16; + break; + default: + throw new Error('Unsupported 
cipher for encrypted key: ' + + keyInfo.encryption); + } + + if (keyInfo.ppk) { + iv = PPK_IV; + + key = Buffer.concat([ + crypto.createHash('sha1') + .update('\x00\x00\x00\x00' + passphrase, 'utf8') + .digest(), + crypto.createHash('sha1') + .update('\x00\x00\x00\x01' + passphrase, 'utf8') + .digest() + ]); + key = key.slice(0, keylen); + } else { + iv = new Buffer(keyInfo.extra[0], 'hex'); + + key = crypto.createHash('md5') + .update(passphrase, 'utf8') + .update(iv.slice(0, 8)) + .digest(); + + while (keylen > key.length) { + key = Buffer.concat([ + key, + (crypto.createHash('md5') + .update(key) + .update(passphrase, 'utf8') + .update(iv) + .digest()).slice(0, 8) + ]); + } + if (key.length > keylen) + key = key.slice(0, keylen); + } + + dc = crypto.createDecipheriv(keyInfo.encryption, key, iv); + dc.setAutoPadding(false); + keyInfo.private = Buffer.concat([ dc.update(keyInfo.private), dc.final() ]); + + keyInfo._decrypted = true; + + if (keyInfo.privateOrig) { + // Update our original base64-encoded version of the private key + var orig = keyInfo.privateOrig.toString('utf8'); + var newOrig = /^(.+(?:\r\n|\n))/.exec(orig)[1]; + var b64key = keyInfo.private.toString('base64'); + + newOrig += b64key.match(/.{1,70}/g).join('\n'); + newOrig += /((?:\r\n|\n).+)$/.exec(orig)[1]; + + keyInfo.privateOrig = newOrig; + } else if (keyInfo.ppk) { + var valid = verifyPPKMAC(keyInfo, passphrase, keyInfo.private); + if (!valid) + throw new Error('PPK MAC mismatch'); + // Automatically convert private key data to OpenSSL format + // (including PEM) + convertPPKPrivate(keyInfo); + } + + // Fill in full key type + // TODO: make DRY, we do this also in keyParser + if (keyInfo.type !== 'ec') { + keyInfo.fulltype = 'ssh-' + keyInfo.type; + } else { + // ECDSA + var asnReader = new Ber.Reader(keyInfo.private); + asnReader.readSequence(); + asnReader.readInt(); + asnReader.readString(Ber.OctetString, true); + asnReader.readByte(); // Skip "complex" context type byte + var offset = 
asnReader.readLength(); // Skip context length + if (offset !== null) { + asnReader._offset = offset; + switch (asnReader.readOID()) { + case '1.2.840.10045.3.1.7': + // prime256v1/secp256r1 + keyInfo.fulltype = 'ecdsa-sha2-nistp256'; + break; + case '1.3.132.0.34': + // secp384r1 + keyInfo.fulltype = 'ecdsa-sha2-nistp384'; + break; + case '1.3.132.0.35': + // secp521r1 + keyInfo.fulltype = 'ecdsa-sha2-nistp521'; + break; + } + } + if (keyInfo.fulltype === undefined) + return new Error('Unsupported EC private key type'); + } +} + +function genPublicKey(keyInfo) { + var publicKey; + var i; + + // RSA + var n; + var e; + + // DSA + var p; + var q; + var g; + var y; + + // ECDSA + var d; + var Q; + var ecCurveOID; + var ecCurveName; + + if (keyInfo.private) { + // parsing private key in ASN.1 format in order to generate a public key + var privKey = keyInfo.private; + var asnReader = new Ber.Reader(privKey); + var errMsg; + + if (asnReader.readSequence() === null) { + errMsg = 'Malformed private key (expected sequence)'; + if (keyInfo._decrypted) + errMsg += '. Bad passphrase?'; + throw new Error(errMsg); + } + + // version (ignored) + if (asnReader.readInt() === null) { + errMsg = 'Malformed private key (expected version)'; + if (keyInfo._decrypted) + errMsg += '. Bad passphrase?'; + throw new Error(errMsg); + } + + if (keyInfo.type === 'rsa') { + // modulus (n) -- integer + n = asnReader.readString(Ber.Integer, true); + if (n === null) { + errMsg = 'Malformed private key (expected RSA n value)'; + if (keyInfo._decrypted) + errMsg += '. Bad passphrase?'; + throw new Error(errMsg); + } + + // public exponent (e) -- integer + e = asnReader.readString(Ber.Integer, true); + if (e === null) { + errMsg = 'Malformed private key (expected RSA e value)'; + if (keyInfo._decrypted) + errMsg += '. 
Bad passphrase?'; + throw new Error(errMsg); + } + + publicKey = new Buffer(4 + 7 // ssh-rsa + + 4 + n.length + + 4 + e.length); + + publicKey.writeUInt32BE(7, 0, true); + publicKey.write('ssh-rsa', 4, 7, 'ascii'); + + i = 4 + 7; + publicKey.writeUInt32BE(e.length, i, true); + e.copy(publicKey, i += 4); + + publicKey.writeUInt32BE(n.length, i += e.length, true); + n.copy(publicKey, i += 4); + } else if (keyInfo.type === 'dss') { // DSA + // prime (p) -- integer + p = asnReader.readString(Ber.Integer, true); + if (p === null) { + errMsg = 'Malformed private key (expected DSA p value)'; + if (keyInfo._decrypted) + errMsg += '. Bad passphrase?'; + throw new Error(errMsg); + } + + // group order (q) -- integer + q = asnReader.readString(Ber.Integer, true); + if (q === null) { + errMsg = 'Malformed private key (expected DSA q value)'; + if (keyInfo._decrypted) + errMsg += '. Bad passphrase?'; + throw new Error(errMsg); + } + + // group generator (g) -- integer + g = asnReader.readString(Ber.Integer, true); + if (g === null) { + errMsg = 'Malformed private key (expected DSA g value)'; + if (keyInfo._decrypted) + errMsg += '. Bad passphrase?'; + throw new Error(errMsg); + } + + // public key value (y) -- integer + y = asnReader.readString(Ber.Integer, true); + if (y === null) { + errMsg = 'Malformed private key (expected DSA y value)'; + if (keyInfo._decrypted) + errMsg += '. 
Bad passphrase?'; + throw new Error(errMsg); + } + + publicKey = new Buffer(4 + 7 // ssh-dss + + 4 + p.length + + 4 + q.length + + 4 + g.length + + 4 + y.length); + + publicKey.writeUInt32BE(7, 0, true); + publicKey.write('ssh-dss', 4, 7, 'ascii'); + + i = 4 + 7; + publicKey.writeUInt32BE(p.length, i, true); + p.copy(publicKey, i += 4); + + publicKey.writeUInt32BE(q.length, i += p.length, true); + q.copy(publicKey, i += 4); + + publicKey.writeUInt32BE(g.length, i += q.length, true); + g.copy(publicKey, i += 4); + + publicKey.writeUInt32BE(y.length, i += g.length, true); + y.copy(publicKey, i += 4); + } else { // ECDSA + d = asnReader.readString(Ber.OctetString, true); + if (d === null) + throw new Error('Malformed private key (expected ECDSA private key)'); + asnReader.readByte(); // Skip "complex" context type byte + var offset = asnReader.readLength(); // Skip context length + if (offset === null) + throw new Error('Malformed private key (expected ECDSA context value)'); + asnReader._offset = offset; + ecCurveOID = asnReader.readOID(); + if (ecCurveOID === null) + throw new Error('Malformed private key (expected ECDSA curve)'); + var tempECDH; + switch (ecCurveOID) { + case '1.2.840.10045.3.1.7': + // prime256v1/secp256r1 + keyInfo.curve = ecCurveName = 'nistp256'; + tempECDH = crypto.createECDH('prime256v1'); + break; + case '1.3.132.0.34': + // secp384r1 + keyInfo.curve = ecCurveName = 'nistp384'; + tempECDH = crypto.createECDH('secp384r1'); + break; + case '1.3.132.0.35': + // secp521r1 + keyInfo.curve = ecCurveName = 'nistp521'; + tempECDH = crypto.createECDH('secp521r1'); + break; + default: + throw new Error('Malformed private key (unsupported EC curve)'); + } + tempECDH.setPrivateKey(d); + Q = tempECDH.getPublicKey(); + + publicKey = new Buffer(4 + 19 // ecdsa-sha2- + + 4 + 8 // + + 4 + Q.length); + + publicKey.writeUInt32BE(19, 0, true); + publicKey.write('ecdsa-sha2-' + ecCurveName, 4, 19, 'ascii'); + + publicKey.writeUInt32BE(8, 23, true); + 
publicKey.write(ecCurveName, 27, 8, 'ascii'); + + publicKey.writeUInt32BE(Q.length, 35, true); + Q.copy(publicKey, 39); + } + } else if (keyInfo.public) { + publicKey = keyInfo.public; + if (keyInfo.type === 'ec') { + // TODO: support adding ecdsa-* prefix + ecCurveName = keyInfo.curve; + } else if (publicKey[0] !== 0 + // check for missing ssh-{dsa,rsa} prefix + || publicKey[1] !== 0 + || publicKey[2] !== 0 + || publicKey[3] !== 7 + || publicKey[4] !== 115 + || publicKey[5] !== 115 + || publicKey[6] !== 104 + || publicKey[7] !== 45 + || ((publicKey[8] !== 114 + || publicKey[9] !== 115 + || publicKey[10] !== 97) + && + ((publicKey[8] !== 100 + || publicKey[9] !== 115 + || publicKey[10] !== 115)))) { + var newPK = new Buffer(4 + 7 + publicKey.length); + publicKey.copy(newPK, 11); + newPK.writeUInt32BE(7, 0, true); + if (keyInfo.type === 'rsa') + newPK.write('ssh-rsa', 4, 7, 'ascii'); + else + newPK.write('ssh-dss', 4, 7, 'ascii'); + publicKey = newPK; + } + } else + throw new Error('Missing data generated by parseKey()'); + + // generate a public key format for use with OpenSSL + + i = 4 + 7; + + var fulltype; + var asn1KeyBuf; + if (keyInfo.type === 'rsa') { + fulltype = 'ssh-rsa'; + asn1KeyBuf = RSAKeySSHToASN1(publicKey.slice(4 + 7)); + } else if (keyInfo.type === 'dss') { + fulltype = 'ssh-dss'; + asn1KeyBuf = DSAKeySSHToASN1(publicKey.slice(4 + 7)); + } else { // ECDSA + fulltype = 'ecdsa-sha2-' + ecCurveName; + asn1KeyBuf = ECDSAKeySSHToASN1(publicKey.slice(4 + 19)); + } + + if (!asn1KeyBuf) + throw new Error('Invalid SSH-formatted public key'); + + var b64key = asn1KeyBuf.toString('base64').replace(RE_KEY_LEN, '$1\n'); + var fullkey = '-----BEGIN PUBLIC KEY-----\n' + + b64key + + (b64key[b64key.length - 1] === '\n' ? 
'' : '\n') + + '-----END PUBLIC KEY-----'; + + return { + type: keyInfo.type, + fulltype: fulltype, + curve: ecCurveName, + public: publicKey, + publicOrig: new Buffer(fullkey) + }; +} + +function verifyPPKMAC(keyInfo, passphrase, privateKey) { + if (keyInfo._macresult !== undefined) + return keyInfo._macresult; + else if (!keyInfo.ppk) + throw new Error("Key isn't a PPK"); + else if (!keyInfo.privateMAC) + throw new Error('Missing MAC'); + else if (!privateKey) + throw new Error('Missing raw private key data'); + else if (keyInfo.encryption && typeof passphrase !== 'string') + throw new Error('Missing passphrase for encrypted PPK'); + else if (keyInfo.encryption && !keyInfo._decrypted) + throw new Error('PPK must be decrypted before verifying MAC'); + + var mac = keyInfo.privateMAC; + var typelen = keyInfo.fulltype.length; + // encryption algorithm is converted at this point for use with OpenSSL, + // so we need to use the original value so that the MAC is calculated + // correctly + var enc = (keyInfo.encryption ? 
'aes256-cbc' : 'none'); + var enclen = enc.length; + var commlen = Buffer.byteLength(keyInfo.comment); + var pub = keyInfo.public; + var publen = pub.length; + var privlen = privateKey.length; + var macdata = new Buffer(4 + typelen + + 4 + enclen + + 4 + commlen + + 4 + publen + + 4 + privlen); + var p = 0; + + macdata.writeUInt32BE(typelen, p, true); + macdata.write(keyInfo.fulltype, p += 4, typelen, 'ascii'); + macdata.writeUInt32BE(enclen, p += typelen, true); + macdata.write(enc, p += 4, enclen, 'ascii'); + macdata.writeUInt32BE(commlen, p += enclen, true); + macdata.write(keyInfo.comment, p += 4, commlen, 'utf8'); + macdata.writeUInt32BE(publen, p += commlen, true); + pub.copy(macdata, p += 4); + macdata.writeUInt32BE(privlen, p += publen, true); + privateKey.copy(macdata, p += 4); + + if (typeof passphrase !== 'string') + passphrase = ''; + + var mackey = crypto.createHash('sha1') + .update('putty-private-key-file-mac-key', 'ascii') + .update(passphrase, 'utf8') + .digest(); + + var calcMAC = crypto.createHmac('sha1', mackey) + .update(macdata) + .digest('hex'); + + return (keyInfo._macresult = (calcMAC === mac)); +} + +function convertPPKPrivate(keyInfo) { + if (!keyInfo.ppk || !keyInfo.public || !keyInfo.private) + throw new Error("Key isn't a PPK"); + else if (keyInfo._converted) + return false; + + var pub = keyInfo.public; + var priv = keyInfo.private; + var asnWriter = new Ber.Writer(); + var p; + var q; + + if (keyInfo.type === 'rsa') { + var e = readString(pub, 4 + 7); + var n = readString(pub, pub._pos); + var d = readString(priv, 0); + p = readString(priv, priv._pos); + q = readString(priv, priv._pos); + var iqmp = readString(priv, priv._pos); + var p1 = new BigInteger(p, 256); + var q1 = new BigInteger(q, 256); + var dmp1 = new BigInteger(d, 256); + var dmq1 = new BigInteger(d, 256); + + dmp1 = new Buffer(dmp1.mod(p1.subtract(BigInteger.ONE)).toByteArray()); + dmq1 = new Buffer(dmq1.mod(q1.subtract(BigInteger.ONE)).toByteArray()); + + 
asnWriter.startSequence(); + asnWriter.writeInt(0x00, Ber.Integer); + asnWriter.writeBuffer(n, Ber.Integer); + asnWriter.writeBuffer(e, Ber.Integer); + asnWriter.writeBuffer(d, Ber.Integer); + asnWriter.writeBuffer(p, Ber.Integer); + asnWriter.writeBuffer(q, Ber.Integer); + asnWriter.writeBuffer(dmp1, Ber.Integer); + asnWriter.writeBuffer(dmq1, Ber.Integer); + asnWriter.writeBuffer(iqmp, Ber.Integer); + asnWriter.endSequence(); + } else { + p = readString(pub, 4 + 7); + q = readString(pub, pub._pos); + var g = readString(pub, pub._pos); + var y = readString(pub, pub._pos); + var x = readString(priv, 0); + + asnWriter.startSequence(); + asnWriter.writeInt(0x00, Ber.Integer); + asnWriter.writeBuffer(p, Ber.Integer); + asnWriter.writeBuffer(q, Ber.Integer); + asnWriter.writeBuffer(g, Ber.Integer); + asnWriter.writeBuffer(y, Ber.Integer); + asnWriter.writeBuffer(x, Ber.Integer); + asnWriter.endSequence(); + } + + var b64key = asnWriter.buffer.toString('base64').replace(RE_KEY_LEN, '$1\n'); + var fullkey = '-----BEGIN ' + + (keyInfo.type === 'rsa' ? 'RSA' : 'DSA') + + ' PRIVATE KEY-----\n' + + b64key + + (b64key[b64key.length - 1] === '\n' ? '' : '\n') + + '-----END ' + + (keyInfo.type === 'rsa' ? 
'RSA' : 'DSA') + + ' PRIVATE KEY-----'; + + keyInfo.private = asnWriter.buffer; + keyInfo.privateOrig = new Buffer(fullkey); + keyInfo._converted = true; + return true; +} + +function readString(buffer, start, encoding, stream, cb, maxLen) { + if (encoding && !Buffer.isBuffer(encoding) && typeof encoding !== 'string') { + if (typeof cb === 'number') + maxLen = cb; + cb = stream; + stream = encoding; + encoding = undefined; + } + + start || (start = 0); + var bufferLen = buffer.length; + var left = (bufferLen - start); + var len; + var end; + if (start < 0 || start >= bufferLen || left < 4) { + stream && stream._cleanup(cb); + return false; + } + + len = buffer.readUInt32BE(start, true); + if (len > (maxLen || MAX_STRING_LEN) || left < (4 + len)) { + stream && stream._cleanup(cb); + return false; + } + + start += 4; + end = start + len; + buffer._pos = end; + + if (encoding) { + if (Buffer.isBuffer(encoding)) { + buffer.copy(encoding, 0, start, end); + return encoding; + } else + return buffer.toString(encoding, start, end); + } else + return buffer.slice(start, end); +} + diff --git a/reverse_engineering/node_modules/ssh2-streams/package.json b/reverse_engineering/node_modules/ssh2-streams/package.json new file mode 100644 index 0000000..396c3cc --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/package.json @@ -0,0 +1,73 @@ +{ + "_args": [ + [ + "ssh2-streams@0.1.20", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "ssh2-streams@0.1.20", + "_id": "ssh2-streams@0.1.20", + "_inBundle": false, + "_integrity": "sha1-URGNFUVV31Rp7h9n4M8efoosDjo=", + "_location": "/ssh2-streams", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "ssh2-streams@0.1.20", + "name": "ssh2-streams", + "escapedName": "ssh2-streams", + "rawSpec": "0.1.20", + "saveSpec": null, + "fetchSpec": "0.1.20" + }, + "_requiredBy": [ + "/ssh2" + ], + "_resolved": 
"https://registry.npmjs.org/ssh2-streams/-/ssh2-streams-0.1.20.tgz", + "_spec": "0.1.20", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "Brian White", + "email": "mscdex@mscdex.net" + }, + "bugs": { + "url": "https://github.com/mscdex/ssh2-streams/issues" + }, + "dependencies": { + "asn1": "~0.2.0", + "semver": "^5.1.0", + "streamsearch": "~0.1.2" + }, + "description": "SSH2 and SFTP(v3) client/server protocol streams for node.js", + "engines": { + "node": ">=0.10.0" + }, + "homepage": "https://github.com/mscdex/ssh2-streams#readme", + "keywords": [ + "ssh", + "ssh2", + "sftp", + "secure", + "protocol", + "streams", + "client", + "server" + ], + "licenses": [ + { + "type": "MIT", + "url": "http://github.com/mscdex/ssh2-streams/raw/master/LICENSE" + } + ], + "main": "./index", + "name": "ssh2-streams", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/mscdex/ssh2-streams.git" + }, + "scripts": { + "test": "node test/test.js" + }, + "version": "0.1.20" +} diff --git a/reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-dsa.ppk b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-dsa.ppk new file mode 100644 index 0000000..915508b --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-dsa.ppk @@ -0,0 +1,17 @@ +PuTTY-User-Key-File-2: ssh-dss +Encryption: aes256-cbc +Comment: dsa-key-20141202 +Public-Lines: 10 +AAAAB3NzaC1kc3MAAACBAJn2I8YefRo3BsEeinQt8KQ4cEyArAs7Y/W733oRSYOI +zWF1Ju124ysKrmg2okv+05CYcjV3Yp4AzQeomYAlgmB/7xCEnWaEnxCwAxmrrJMm +PrkwNjHOIi7yM5QOE90IM/Q+IJA4EPBfSb+Xr8fYhrp53KNHVSnc2KkOqpo2FsIj +AAAAFQC4NlP50GqyUqq2B82Vh/w5j3TzwQAAAIAeSGom9LLNdzcwCHnGfxKNnEz3 +55KITADTxiIpBvnQW+eDHwQvIw6V2Oc73bKCu5ZirZmIMW5w6KjQVwkuQBoF9Koq +/2u6VeevtL9pD6TBzSLMVw5pV3PmE4/C/eLiaUxZLIHdbzpqPkAvAUBrXKkj0ijz +cNzCp1fuF8H0pvR8yQAAAIAmvV+kqWhUgDYwNNz1qDaoS8XdsOponutZ/0stRQ66 
+mKAy8kNVNNQ6oUx1XFl1WUt4iyFY/2Rz2fZhLz5/TbZRK5ygo666WgnxB/Ud4GAx +/BPQTghOJJOL00vJk+8jVCGNDc942V6nFXznDMXwqxhRCW6dm+2lTh7ntrli8mCk +5g== +Private-Lines: 1 +BytvbK+jNyMjiVxCO5lcE4YbW7q293oC+LZjkZ8Ajlw= +Private-MAC: c3da536ea28851fc32d5d1ff01498c8fcebc1170 diff --git a/reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-rsa.ppk b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-rsa.ppk new file mode 100644 index 0000000..6f2f7f7 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/encrypted-rsa.ppk @@ -0,0 +1,18 @@ +PuTTY-User-Key-File-2: ssh-rsa +Encryption: aes256-cbc +Comment: rsa-key-20141119 +Public-Lines: 4 +AAAAB3NzaC1yc2EAAAABJQAAAIBrBWETAVAyJmuNG53jwTNDlbIcH5lrEvcx6lx5 +bM6EKg0XmOIH96VqUjS7eRRTTD9lpBA8hYhkrOjOx93/JWB/pcVN8/B3DYHshT9O +BW1DCkrNwut2pbJ2oZOBirhhAr+xqWFr3551FqbzaCIXpOKubr4EcIwCipBl6PxL +USfHgw== +Private-Lines: 8 +8O3NrBePR4+4RHHys8wrRKCmgx3Gsdz1cKoRJJDgnnrQxuAxBTVUlVTC2vzSOXrP +jlKdRP9DbtrL5YF8g9HkMPpzzTdgpiEAGikpIc+L0sJhN+S9VvMoXRRKqyuB7o1C +xZhAeRaZ68izdUUbFd7ajUwBNpGoFppOznGXyf/3/Ao9FfoTKReZzeBd/e2/JFhc +nsYkSbtWfKQBVXF1Fhr10UwRWSMaVJSDkcSuk8ghICoKBBCgRBnZFap0SR77oIJh +DKgmNFktoKzEqh111vYPhQyEEyGNxpD0aEPaGUJEjPEd3C5a46n7mIiqrNX7QJoo +xxZtkueGdXWaoe5mBf1tFc+nCA1l72nUlghJZooQhnO9NPpieu6NNZ8X+tFQ1Rq/ +xvOZHzpDOOeOgWdV7oAmRDbDjYPh0H67z2OKCFaP0Z9kgmnwqV2IJvTDrexj1VwY +6kFaPldnK+ohXl37oVIlWA== +Private-MAC: 9d09a15a122e48955682ba969d33c75ba8e4be2c diff --git a/reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa new file mode 100644 index 0000000..c7c09e1 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpgIBAAKCAQEAysX9gGx5CsLW3xHqOgnrbfCuXy1Y0OFpZBxWs6FNHxtqpxRD +Ii8vnWQw9Ltul5NhBb1QrS/3XlODu5rv8VWZy+ciXDaeaVYvzCR3fHmQyDXA+WLR +x8kkR4ZxHTQREj1yifGyHdZTMsMbSaW9Eh2+sgWIjZaBkLjF2NDT52HX0y+zr2WL 
+TYVCcYf9ugCJIfwXN4/SpR+cV3MSak+4HfFDRo7EwFbYL8QRpNZLVVas5SUpN3CR +e6u26zPNgcqSRmC81lA/e2t7qG1qvjFy9RgJpUJJsZan5yMYAob9p83Fo61ajLX1 +DeqpZPauEfe9e/4bkjVbU8gW/cgb9Ct6v+59fQIDAQABAoIBAQCUH6GZKKQbKk1Q +UoPptclXXFPkjvmsTJ1yVgk32wK9UIfOWUzZMeL0dHYWsLyO2Ugsqyi9+qYXL6ax +avMQNlnMy6Eb/4IkrKLp21G1ihArUKkRBBD+gn6QCTOkA57c1ipPflGA3kmUcVYD +rKC7FSNyY/xmgaVQFMniIlpJUf4Y9BjIKC2FSTGxtKx2AI7eBDjmiZ7Mkcinaf62 +5jl0AfV0/PgBb7AqhPWcmJp+/jWyZ3IrlPvMspEg6PfLnNzACKrh/Qorkhf3cLcn +aFJmnyRn4Zhx+scqoWq1NcYOfNyU+eAEofTu8TwvGVh0nx9C2vxwPftjDQlfh0kf +wGscN72BAoGBAOZs4u5ObnFf4pNbGfjhLhSf2vXATb9TYjr8vPc4xbviukYGPaNR +tP5d9oXHecFP37IeV6I0SmMmd0Sth56/RfRFD8Rirq3MjjKWvwwbJ1sd34n9cqN3 +goSWERfqPOzVMjxEPqq0wm/7Y61EU5Gt1ou8MOOS+lPx3LmpapSjSNutAoGBAOFH +bLZQUpLaUf7ijCU/yykRdpzBpwDDEgRqf7RLqS8p+tzI9z39KfNSxrVEqhWe7uuZ +3dl8EjrXhBK8FoCDAD6yrx35aUUeUJHZp/kFoXBdlHpGk8VvveTJMSqPS6KxKglB +JqINECpwHzasQSun2vhLsH6ikUUqnB8qkJYMNGMRAoGBAJ5wio1/4pMUclAOWUp3 +U18dF8rZItyGskmLFUgMgnIRkTGrAuD0wrYQ1/zez0pr6I2qLWm7+NY6UdIoW9N9 +A9XVUuUfUarxt2jznIPve908KkPQwO3DtDwEj9ZY2vGiGruz1T89sWCcg8pU6/2E +gbxtd1RT8MqdyL7kzgNMT0G1AoGBAMc1otOBKdPEjVeIPAfAZsXVNH8f268X0Lno +9y6W7W/Qfo54g/HkChGqNB8dLg5accoTURO93Fz+MUNj4b4YmcHXvnUSFV3Slcqz +nw7dRMoZZm/OM2QXEOPcacbJ2kduxM5UN3Z7eL0eUuvE3QhesYp5XARPe4ifSllh +CWQ2PKkRAoGBAMFANW/IxEH6H60yfJbY9t/0ggu4DoVI62rmv43n2dYsdNeaF1Fl +SbGoK16+6ERyjyJtqe0642LpgenrEcpJbXxr1qtG6gGRCJIcZ8gJDiVj4XW1d1cZ +eCNZLVlVOrBNRkKmXDD5Sd10m52tlBMfarvskgbJVvkRO6pSz3o8Q0B1 +-----END RSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa_enc b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa_enc new file mode 100644 index 0000000..ec0646c --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/id_rsa_enc @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,3E36D2FEB814842942EC58DA9722DB75 + +MBCl5tVBiTjZywjXSMPNnVcBzuAhZzLsvb/pPofnVNnx96CqBMAzaKxGu6Ls9UBu 
+ejGGOik+8EJHV7p7dj6GAf5srgBnFXVYQy69eoL4xl1IuKoSKOXicS70/QXuApzY +tBdwnqMz1y1tuWZ0q4iOZYQHqvNTvhH4s/rfm8/As7GwPi4NQ4FipsbOifKVIlJN +0+XQAZLBjD6D2BFbVBas2RyI/zT5NyFtFxEPlQwWjuajnDrdVFf59mkzHzQc8Jo1 +qjCv6bDHTW9EeoZeloqpAaYegmOYGynb8HQzmbEBoarXsOkwtaI38jXT3WOljaVJ +xuWV7xl181CsV4dgbyd948YKcHg3riRQiQwXXlKe4MTGwcQ3sybR4oFLzFIa6Ct2 +qbsyqvc+3tiBhOZtiAXgDdbzvXR+60aSTjduVp+52lF/m4l658Sleyx2hxQ3LEcc +lbuG27WIbLEATeqV9gaJ1jDtEyEtGNALAdIZF6YdND5tiTSqHd0n3yUMbZVdiQ5i +s5lSfWD38c3Is4hOdZlG9nMFoAGe7wnFmV+N5Cv3vnlFPFogoO4eVZS5S+ReAmas +syOelwKvVNNy0FzCoQ3URZWLQTKsZ7Wkl1ve9jJ5ZPox0p09A0gcykIB5oya9/OL +oTKDhoenoh0p3ttyQ7WbkUc9VwHNn+rX7N3v/Ui+NF8mnCfD/1UOzBhRY0LjDCfO +qT70wd63c4cSr0HCTDNrGtShztJD9gdzwIH6q/eNDjFk1S8KOrV/K6wQwWzULs+c +Ld/7xkELz+d60Ll5dVUHcSxOhYwBVY+GGC5ybOmSqr5+t1MDfLN0Q4d3x0X6LU2q +sL94x93XFY342x5NpjOusKISCaCx9eBXxCpi132IAfmB7kDZXpbe02aed0bNChUP +IM6eXFYVfPb5YZ3oXlWJ+e/E01dGPK2rHnAzOLH2DSmDZ7MWZW5hkRKQ4zU6/ECo +0K7uGLAdpCoC8FKWcE2Gm7fol5fmHes06SNC1wLyVHtv4g7kFfnfmNIVVy3lSP1f +WoHRL52lNOCR+E1N03djvyKoK46cKiJ9TcBrJXcMHSSjPvqLafS/AUK+FYjxlqyM +qXgATtt/n0h+VxjPm2+xZNVHC5fD5GHhoo3PhmapLpLNyDlhzJTy/iFtrmX6uht1 +4XDYmzzQyoRu6+1DQ7HPpwXZf84+QFGGfb/LTiLUF+pgCHOt41HdHEaU6eIHGEk0 +wfTfWG2pP/2wDUtX8KVAnikfIaa5KrC/ZuDV5pfp0Tk+Rjt2iv9+YuB7fPrvKF3Z +B7zgMuBG8FQZMFHvWsaDHE/vo8fivgeM0zVL0shigLuwL+p6EeanRdm5bRavF8M8 +XDtOjZt/id4UMQDuobawUVadTZhtGfFNfO9Wt16EAa6jhRBJ1A+FRGqFbJXV36ZV +zzx7edCzyCd0rcsZoi6Kz4NeDxm1lcGspBBhstUT5cbgQo91wAZCU1hpj/qsqygr +KK4NcNpDjYFc1azb/zRGe+zx1WzWhKAZR6HfMEoJwzyzTcwRmApbTEYqv+oYxC0+ +f/HNQaCH/UGDy4QEvMVaJRnjFv7oW6GpS7ob6POvi2esgYglwNJR1hN86W7kfOrF +-----END RSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2-streams/test/fixtures/ssh_host_rsa_key b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/ssh_host_rsa_key new file mode 100644 index 0000000..9c2cc6f --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/fixtures/ssh_host_rsa_key @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- 
+MIICXAIBAAKBgQC57UB/5H0M+t+mopksrltCCIXghryzofJjau+8tuMT9CG6ta3S +O9aKApJUUG/xtc88giVhB7HFABX/oob+jrkSthR8s/whULC8E+GhvOBjHydRUZIs +aPYOMBb42HcbOsgq3li/hwOcDk0vY00hZDKCum9BgvRAb7dPEkw2dmiCQQIDAQAB +AoGAMG+HOwoaLbR5aR64yrQNYBF6Vvii1iUdURr9o2r9kygpVUuZIcim5kMvPbnK +v+w+NaQt+q4XeJvCH1uG0W/69FwnphfaOVmCCUtsoJ6sU3fWr9x59MtKL2Llh8xR +50lz6R+eDXoYRDq245hG9BFn/bu0vtqQqx06mlZJcjaRocECQQDjdYFmr+DSww3x +VNx0G0DUkaQZZ+iqZiT3Zund2pcBB4aLiewOrqj0GFct4+YNzgxIXPejmS0eSokN +N2lC3NxZAkEA0UGjN5TG5/LEK3zcYtx2kpXryenrYORo1n2L/WPMZ0mjLQyd4LJr +ibfgVUfwX/kV3vgGYLwjpgcaTiMsecv4KQJAYMmMgZSPdz+WvD1e/WznXkyG5mSn +xXJngnrhQw0TulVodBIBR5IcxJli510VdIRcB6K/oXa5ky0mOmB8wv3WKQJBAKEF +PxE//KbzWhyUogm4180IbD4dMDCI0ltqlFRRfTJlqZi6wqnq4XFB+u/kwYU4aKoA +dPfvDgduI8HIsyqt17ECQDI/HC8PiYsDIOyVpQuQdIAsbGmoavK7X1MVEWR2nj9t +7BbUVFSnVKynL4TWIJZ6xP8WQwkDBQc5WjognHDaUTQ= +-----END RSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2-streams/test/test-durability-ssh.js b/reverse_engineering/node_modules/ssh2-streams/test/test-durability-ssh.js new file mode 100644 index 0000000..853656b --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test-durability-ssh.js @@ -0,0 +1,169 @@ +var SSH2Stream = require('../lib/ssh'); + +var fs = require('fs'); +var path = require('path'); +var inspect = require('util').inspect; +var inherits = require('util').inherits; +var TransformStream = require('stream').Transform; +var assert = require('assert'); + +var t = -1; +var group = path.basename(__filename, '.js') + '/'; +var fixturesdir = path.join(__dirname, 'fixtures'); + +var HOST_KEY_RSA = fs.readFileSync(path.join(fixturesdir, 'ssh_host_rsa_key')); +var SERVER_CONFIG = { + server: true, + hostKeys: { 'ssh-rsa': HOST_KEY_RSA } +}; + +function SimpleStream() { + TransformStream.call(this); + this.buffer = ''; +} +inherits(SimpleStream, TransformStream); +SimpleStream.prototype._transform = function(chunk, encoding, cb) { + this.buffer += chunk.toString('binary'); + cb(); +}; + +var tests 
= [ + { run: function() { + var what = this.what; + var serverError = false; + var server = new SSH2Stream(SERVER_CONFIG); + var client = new SimpleStream(); + + client.pipe(server).pipe(client); + + server.on('error', function(err) { + serverError = err; + assert(err.message === 'Protocol version not supported', + makeMsg(what, 'Wrong error message')); + }).on('end', function() { + assert(client.buffer === server.config.ident + '\r\n', + makeMsg(what, 'Wrong server ident: ' + inspect(client.buffer))); + assert(serverError, makeMsg(what, 'Expected server error')); + next(); + }); + + client.push('SSH-1.0-aaa\r\n'); + }, + what: 'Incompatible client SSH protocol version' + }, + { run: function() { + var what = this.what; + var serverError = false; + var server = new SSH2Stream(SERVER_CONFIG); + var client = new SimpleStream(); + + client.pipe(server).pipe(client); + + server.on('error', function(err) { + serverError = err; + assert(err.message === 'Bad identification start', + makeMsg(what, 'Wrong error message')); + }).on('end', function() { + assert(client.buffer === server.config.ident + '\r\n', + makeMsg(what, 'Wrong server ident: ' + inspect(client.buffer))); + assert(serverError, makeMsg(what, 'Expected server error')); + next(); + }); + client.push('LOL-2.0-asdf\r\n'); + }, + what: 'Malformed client protocol identification' + }, + { run: function() { + var what = this.what; + var serverError = false; + var server = new SSH2Stream(SERVER_CONFIG); + var client = new SimpleStream(); + + client.pipe(server).pipe(client); + + server.on('error', function(err) { + serverError = err; + assert(err.message === 'Max identification string size exceeded', + makeMsg(what, 'Wrong error message')); + }).on('end', function() { + assert(client.buffer === server.config.ident + '\r\n', + makeMsg(what, 'Wrong server ident: ' + inspect(client.buffer))); + assert(serverError, makeMsg(what, 'Expected server error')); + next(); + }); + var ident = 'SSH-2.0-'; + for (var i = 0; i < 
30; ++i) + ident += 'foobarbaz'; + ident += '\r\n'; + client.push(ident); + }, + what: 'SSH client protocol identification too long (> 255 characters)' + }, + { run: function() { + var what = this.what; + var serverError = false; + var server = new SSH2Stream(SERVER_CONFIG); + var client = new SimpleStream(); + + client.pipe(server).pipe(client); + + server.on('error', function(err) { + serverError = err; + assert(err.message === 'Bad packet length', + makeMsg(what, 'Wrong error message')); + }).on('end', function() { + assert(client.buffer.length, makeMsg(what, 'Expected server data')); + assert(serverError, makeMsg(what, 'Expected server error')); + next(); + }); + client.push('SSH-2.0-asdf\r\n'); + // 500,000 byte packet_length + client.push(new Buffer([0x00, 0x07, 0xA1, 0x20, 0x00, 0x00, 0x00, 0x00])); + }, + what: 'Bad packet length (max)' + }, + { run: function() { + var what = this.what; + var serverError = false; + var server = new SSH2Stream(SERVER_CONFIG); + var client = new SimpleStream(); + + client.pipe(server).pipe(client); + + server.on('error', function(err) { + serverError = err; + assert(err.message === 'Bad packet length', + makeMsg(what, 'Wrong error message')); + }).on('end', function() { + assert(client.buffer.length, makeMsg(what, 'Expected server data')); + assert(serverError, makeMsg(what, 'Expected server error')); + next(); + }); + client.push('SSH-2.0-asdf\r\n'); + client.push(new Buffer([0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00])); + }, + what: 'Bad packet length (min)' + }, +]; + +function next() { + if (Array.isArray(process._events.exit)) + process._events.exit = process._events.exit[1]; + if (++t === tests.length) + return; + + var v = tests[t]; + v.run.call(v); +} + +function makeMsg(what, msg) { + return '[' + group + what + ']: ' + msg; +} + +process.once('exit', function() { + assert(t === tests.length, + makeMsg('_exit', + 'Only finished ' + t + '/' + tests.length + ' tests')); +}); + +next(); diff --git 
a/reverse_engineering/node_modules/ssh2-streams/test/test-kexdh.js b/reverse_engineering/node_modules/ssh2-streams/test/test-kexdh.js new file mode 100644 index 0000000..5544168 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test-kexdh.js @@ -0,0 +1,66 @@ +var SSH2Stream = require('../lib/ssh'); +var parseKey = require('../lib/utils').parseKey; +var genPubKey = require('../lib/utils').genPublicKey; +var MESSAGE = require('../lib/constants').MESSAGE; + +var assert = require('assert'); +var fs = require('fs'); + +var SERVER_KEY = fs.readFileSync(__dirname + '/fixtures/ssh_host_rsa_key'); +var SERVER_KEY_PRV = parseKey(SERVER_KEY); +var SERVER_KEY_PUB = genPubKey(SERVER_KEY_PRV); + +var server = new SSH2Stream({ + server: true, + hostKeys: { + 'ssh-rsa': { + privateKey: SERVER_KEY_PRV, + publicKey: SERVER_KEY_PUB, + } + } +}); +var client = new SSH2Stream(); +var cliError; +var srvError; + +server.on('error', function(err) { + assert(err); + assert(/unexpected/.test(err.message)); + assert(!srvError); + srvError = err; +}); + +// Removed 'KEXDH_REPLY' listeners as it causes client to send 'NEWKEYS' which +// changes server's state. 
+client.removeAllListeners('KEXDH_REPLY'); +// Removed 'NEWKEYS' listeners as server sends 'NEWKEYS' after receiving +// 'KEXDH_INIT' which causes errors on client if 'NEWKEYS' is processed +// without processing 'KEXDH_REPLY' +client.removeAllListeners('NEWKEYS'); +// Added 'KEXDH_REPLY' which violates protocol and re-sends 'KEXDH_INIT' +// packet +client.on('KEXDH_REPLY', function(info) { + var state = client._state; + var outstate = state.outgoing; + var buf = new Buffer(1 + 4 + outstate.pubkey.length); + buf[0] = MESSAGE.KEXDH_INIT; + buf.writeUInt32BE(outstate.pubkey.length, 1, true); + outstate.pubkey.copy(buf, 5); + SSH2Stream._send(client, buf, undefined, true); +}); +client.on('error', function(err) { + assert(!cliError); + assert(err); + assert.equal( + err.message, + 'PROTOCOL_ERROR', + 'Expected Error: PROTOCOL_ERROR Got Error: ' + err.message + ); + cliError = err; +}); +client.pipe(server).pipe(client); + +process.on('exit', function() { + assert(cliError, 'Expected client error'); + //assert(srvError, 'Expected server error'); +}); diff --git a/reverse_engineering/node_modules/ssh2-streams/test/test-keyparser.js b/reverse_engineering/node_modules/ssh2-streams/test/test-keyparser.js new file mode 100644 index 0000000..d634dc3 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test-keyparser.js @@ -0,0 +1,2301 @@ +var parser = require('../lib/keyParser'); + +var path = require('path'); +var assert = require('assert'); +var inspect = require('util').inspect; + +var group = path.basename(__filename, '.js') + '/'; + +[ + { source: [ + '-----BEGIN RSA PRIVATE KEY-----', + 'MIIBywIBAAJhANXy0sLFQFgcY6sfyqlrBBNAcow7MWjQS0flUEj8HmrdTX+denZ5', + 'sArVEBWIoIcCWt0DWGz/mZDS2XPqyy9IbEnhwtZ3bpHmw11J1k61odAwy5sE3P4S', + 'kS/Svl7Wxjcr5wIDAQABAmEAx50pG2QiZpMGQvEEixE32iZ1OXU44kwFEO1Y/N+O', + 'GbKkzf5F9igPIewsd4rkC1MbcVPYTN6a7WlFh2dP1kC5wdPAdVjBf1p7Ii4t4HZt', + 'OokidduJzCmC+TulDCNLFmlBAjEA/6SUxcxRXJV7GHBmu+FIbnR6/drHaBvMkNKZ', + 
'/tGIq1KfrHIWrhGl2vYqot6hRgoXAjEA1j9VAyuDK2NhQG/WMmf8WV+OMeJQ7vpn', + '3KmtPc/CsWfPNYeQagfwlj0SLGwz8J6xAjEA4qIRkllUN/P9MOlU6nMKN4HkF3EB', + 'HvamIC3Uf+dadifFjqeGG8NhegCKQ+GCsAj5AjAecs3AL5Tqu6r8n2hPrZudkAkD', + 'aeiarhfIsoyedkJzOA+oyeVqTEqS74b/rTkMf2ECMBrts0oFmVJYo77riKQBYbR7', + '/8feisYSPHSg7hwaoXSwU+jU4g39w7rgvO+WkGuzyQ==', + '-----END RSA PRIVATE KEY-----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: new Buffer([ + 0x30, 0x82, 0x01, 0xcb, 0x02, 0x01, 0x00, 0x02, 0x61, 0x00, 0xd5, 0xf2, + 0xd2, 0xc2, 0xc5, 0x40, 0x58, 0x1c, 0x63, 0xab, 0x1f, 0xca, 0xa9, 0x6b, + 0x04, 0x13, 0x40, 0x72, 0x8c, 0x3b, 0x31, 0x68, 0xd0, 0x4b, 0x47, 0xe5, + 0x50, 0x48, 0xfc, 0x1e, 0x6a, 0xdd, 0x4d, 0x7f, 0x9d, 0x7a, 0x76, 0x79, + 0xb0, 0x0a, 0xd5, 0x10, 0x15, 0x88, 0xa0, 0x87, 0x02, 0x5a, 0xdd, 0x03, + 0x58, 0x6c, 0xff, 0x99, 0x90, 0xd2, 0xd9, 0x73, 0xea, 0xcb, 0x2f, 0x48, + 0x6c, 0x49, 0xe1, 0xc2, 0xd6, 0x77, 0x6e, 0x91, 0xe6, 0xc3, 0x5d, 0x49, + 0xd6, 0x4e, 0xb5, 0xa1, 0xd0, 0x30, 0xcb, 0x9b, 0x04, 0xdc, 0xfe, 0x12, + 0x91, 0x2f, 0xd2, 0xbe, 0x5e, 0xd6, 0xc6, 0x37, 0x2b, 0xe7, 0x02, 0x03, + 0x01, 0x00, 0x01, 0x02, 0x61, 0x00, 0xc7, 0x9d, 0x29, 0x1b, 0x64, 0x22, + 0x66, 0x93, 0x06, 0x42, 0xf1, 0x04, 0x8b, 0x11, 0x37, 0xda, 0x26, 0x75, + 0x39, 0x75, 0x38, 0xe2, 0x4c, 0x05, 0x10, 0xed, 0x58, 0xfc, 0xdf, 0x8e, + 0x19, 0xb2, 0xa4, 0xcd, 0xfe, 0x45, 0xf6, 0x28, 0x0f, 0x21, 0xec, 0x2c, + 0x77, 0x8a, 0xe4, 0x0b, 0x53, 0x1b, 0x71, 0x53, 0xd8, 0x4c, 0xde, 0x9a, + 0xed, 0x69, 0x45, 0x87, 0x67, 0x4f, 0xd6, 0x40, 0xb9, 0xc1, 0xd3, 0xc0, + 0x75, 0x58, 0xc1, 0x7f, 0x5a, 0x7b, 0x22, 0x2e, 0x2d, 0xe0, 0x76, 0x6d, + 0x3a, 0x89, 0x22, 0x75, 0xdb, 0x89, 0xcc, 0x29, 0x82, 0xf9, 0x3b, 0xa5, + 0x0c, 0x23, 0x4b, 0x16, 0x69, 0x41, 0x02, 0x31, 0x00, 0xff, 0xa4, 0x94, + 0xc5, 0xcc, 0x51, 0x5c, 0x95, 0x7b, 0x18, 0x70, 0x66, 0xbb, 0xe1, 0x48, + 0x6e, 0x74, 0x7a, 0xfd, 0xda, 0xc7, 0x68, 0x1b, 
0xcc, 0x90, 0xd2, 0x99, + 0xfe, 0xd1, 0x88, 0xab, 0x52, 0x9f, 0xac, 0x72, 0x16, 0xae, 0x11, 0xa5, + 0xda, 0xf6, 0x2a, 0xa2, 0xde, 0xa1, 0x46, 0x0a, 0x17, 0x02, 0x31, 0x00, + 0xd6, 0x3f, 0x55, 0x03, 0x2b, 0x83, 0x2b, 0x63, 0x61, 0x40, 0x6f, 0xd6, + 0x32, 0x67, 0xfc, 0x59, 0x5f, 0x8e, 0x31, 0xe2, 0x50, 0xee, 0xfa, 0x67, + 0xdc, 0xa9, 0xad, 0x3d, 0xcf, 0xc2, 0xb1, 0x67, 0xcf, 0x35, 0x87, 0x90, + 0x6a, 0x07, 0xf0, 0x96, 0x3d, 0x12, 0x2c, 0x6c, 0x33, 0xf0, 0x9e, 0xb1, + 0x02, 0x31, 0x00, 0xe2, 0xa2, 0x11, 0x92, 0x59, 0x54, 0x37, 0xf3, 0xfd, + 0x30, 0xe9, 0x54, 0xea, 0x73, 0x0a, 0x37, 0x81, 0xe4, 0x17, 0x71, 0x01, + 0x1e, 0xf6, 0xa6, 0x20, 0x2d, 0xd4, 0x7f, 0xe7, 0x5a, 0x76, 0x27, 0xc5, + 0x8e, 0xa7, 0x86, 0x1b, 0xc3, 0x61, 0x7a, 0x00, 0x8a, 0x43, 0xe1, 0x82, + 0xb0, 0x08, 0xf9, 0x02, 0x30, 0x1e, 0x72, 0xcd, 0xc0, 0x2f, 0x94, 0xea, + 0xbb, 0xaa, 0xfc, 0x9f, 0x68, 0x4f, 0xad, 0x9b, 0x9d, 0x90, 0x09, 0x03, + 0x69, 0xe8, 0x9a, 0xae, 0x17, 0xc8, 0xb2, 0x8c, 0x9e, 0x76, 0x42, 0x73, + 0x38, 0x0f, 0xa8, 0xc9, 0xe5, 0x6a, 0x4c, 0x4a, 0x92, 0xef, 0x86, 0xff, + 0xad, 0x39, 0x0c, 0x7f, 0x61, 0x02, 0x30, 0x1a, 0xed, 0xb3, 0x4a, 0x05, + 0x99, 0x52, 0x58, 0xa3, 0xbe, 0xeb, 0x88, 0xa4, 0x01, 0x61, 0xb4, 0x7b, + 0xff, 0xc7, 0xde, 0x8a, 0xc6, 0x12, 0x3c, 0x74, 0xa0, 0xee, 0x1c, 0x1a, + 0xa1, 0x74, 0xb0, 0x53, 0xe8, 0xd4, 0xe2, 0x0d, 0xfd, 0xc3, 0xba, 0xe0, + 0xbc, 0xef, 0x96, 0x90, 0x6b, 0xb3, 0xc9, + ]), + privateOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x52, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x42, + 0x79, 0x77, 0x49, 0x42, 0x41, 0x41, 0x4a, 0x68, 0x41, 0x4e, 0x58, 0x79, + 0x30, 0x73, 0x4c, 0x46, 0x51, 0x46, 0x67, 0x63, 0x59, 0x36, 0x73, 0x66, + 0x79, 0x71, 0x6c, 0x72, 0x42, 0x42, 0x4e, 0x41, 0x63, 0x6f, 0x77, 0x37, + 0x4d, 0x57, 0x6a, 0x51, 0x53, 0x30, 0x66, 0x6c, 0x55, 0x45, 0x6a, 0x38, + 0x48, 0x6d, 0x72, 0x64, 0x54, 0x58, 0x2b, 
0x64, 0x65, 0x6e, 0x5a, 0x35, + 0x0a, 0x73, 0x41, 0x72, 0x56, 0x45, 0x42, 0x57, 0x49, 0x6f, 0x49, 0x63, + 0x43, 0x57, 0x74, 0x30, 0x44, 0x57, 0x47, 0x7a, 0x2f, 0x6d, 0x5a, 0x44, + 0x53, 0x32, 0x58, 0x50, 0x71, 0x79, 0x79, 0x39, 0x49, 0x62, 0x45, 0x6e, + 0x68, 0x77, 0x74, 0x5a, 0x33, 0x62, 0x70, 0x48, 0x6d, 0x77, 0x31, 0x31, + 0x4a, 0x31, 0x6b, 0x36, 0x31, 0x6f, 0x64, 0x41, 0x77, 0x79, 0x35, 0x73, + 0x45, 0x33, 0x50, 0x34, 0x53, 0x0a, 0x6b, 0x53, 0x2f, 0x53, 0x76, 0x6c, + 0x37, 0x57, 0x78, 0x6a, 0x63, 0x72, 0x35, 0x77, 0x49, 0x44, 0x41, 0x51, + 0x41, 0x42, 0x41, 0x6d, 0x45, 0x41, 0x78, 0x35, 0x30, 0x70, 0x47, 0x32, + 0x51, 0x69, 0x5a, 0x70, 0x4d, 0x47, 0x51, 0x76, 0x45, 0x45, 0x69, 0x78, + 0x45, 0x33, 0x32, 0x69, 0x5a, 0x31, 0x4f, 0x58, 0x55, 0x34, 0x34, 0x6b, + 0x77, 0x46, 0x45, 0x4f, 0x31, 0x59, 0x2f, 0x4e, 0x2b, 0x4f, 0x0a, 0x47, + 0x62, 0x4b, 0x6b, 0x7a, 0x66, 0x35, 0x46, 0x39, 0x69, 0x67, 0x50, 0x49, + 0x65, 0x77, 0x73, 0x64, 0x34, 0x72, 0x6b, 0x43, 0x31, 0x4d, 0x62, 0x63, + 0x56, 0x50, 0x59, 0x54, 0x4e, 0x36, 0x61, 0x37, 0x57, 0x6c, 0x46, 0x68, + 0x32, 0x64, 0x50, 0x31, 0x6b, 0x43, 0x35, 0x77, 0x64, 0x50, 0x41, 0x64, + 0x56, 0x6a, 0x42, 0x66, 0x31, 0x70, 0x37, 0x49, 0x69, 0x34, 0x74, 0x34, + 0x48, 0x5a, 0x74, 0x0a, 0x4f, 0x6f, 0x6b, 0x69, 0x64, 0x64, 0x75, 0x4a, + 0x7a, 0x43, 0x6d, 0x43, 0x2b, 0x54, 0x75, 0x6c, 0x44, 0x43, 0x4e, 0x4c, + 0x46, 0x6d, 0x6c, 0x42, 0x41, 0x6a, 0x45, 0x41, 0x2f, 0x36, 0x53, 0x55, + 0x78, 0x63, 0x78, 0x52, 0x58, 0x4a, 0x56, 0x37, 0x47, 0x48, 0x42, 0x6d, + 0x75, 0x2b, 0x46, 0x49, 0x62, 0x6e, 0x52, 0x36, 0x2f, 0x64, 0x72, 0x48, + 0x61, 0x42, 0x76, 0x4d, 0x6b, 0x4e, 0x4b, 0x5a, 0x0a, 0x2f, 0x74, 0x47, + 0x49, 0x71, 0x31, 0x4b, 0x66, 0x72, 0x48, 0x49, 0x57, 0x72, 0x68, 0x47, + 0x6c, 0x32, 0x76, 0x59, 0x71, 0x6f, 0x74, 0x36, 0x68, 0x52, 0x67, 0x6f, + 0x58, 0x41, 0x6a, 0x45, 0x41, 0x31, 0x6a, 0x39, 0x56, 0x41, 0x79, 0x75, + 0x44, 0x4b, 0x32, 0x4e, 0x68, 0x51, 0x47, 0x2f, 0x57, 0x4d, 0x6d, 0x66, + 0x38, 0x57, 0x56, 0x2b, 0x4f, 0x4d, 0x65, 
0x4a, 0x51, 0x37, 0x76, 0x70, + 0x6e, 0x0a, 0x33, 0x4b, 0x6d, 0x74, 0x50, 0x63, 0x2f, 0x43, 0x73, 0x57, + 0x66, 0x50, 0x4e, 0x59, 0x65, 0x51, 0x61, 0x67, 0x66, 0x77, 0x6c, 0x6a, + 0x30, 0x53, 0x4c, 0x47, 0x77, 0x7a, 0x38, 0x4a, 0x36, 0x78, 0x41, 0x6a, + 0x45, 0x41, 0x34, 0x71, 0x49, 0x52, 0x6b, 0x6c, 0x6c, 0x55, 0x4e, 0x2f, + 0x50, 0x39, 0x4d, 0x4f, 0x6c, 0x55, 0x36, 0x6e, 0x4d, 0x4b, 0x4e, 0x34, + 0x48, 0x6b, 0x46, 0x33, 0x45, 0x42, 0x0a, 0x48, 0x76, 0x61, 0x6d, 0x49, + 0x43, 0x33, 0x55, 0x66, 0x2b, 0x64, 0x61, 0x64, 0x69, 0x66, 0x46, 0x6a, + 0x71, 0x65, 0x47, 0x47, 0x38, 0x4e, 0x68, 0x65, 0x67, 0x43, 0x4b, 0x51, + 0x2b, 0x47, 0x43, 0x73, 0x41, 0x6a, 0x35, 0x41, 0x6a, 0x41, 0x65, 0x63, + 0x73, 0x33, 0x41, 0x4c, 0x35, 0x54, 0x71, 0x75, 0x36, 0x72, 0x38, 0x6e, + 0x32, 0x68, 0x50, 0x72, 0x5a, 0x75, 0x64, 0x6b, 0x41, 0x6b, 0x44, 0x0a, + 0x61, 0x65, 0x69, 0x61, 0x72, 0x68, 0x66, 0x49, 0x73, 0x6f, 0x79, 0x65, + 0x64, 0x6b, 0x4a, 0x7a, 0x4f, 0x41, 0x2b, 0x6f, 0x79, 0x65, 0x56, 0x71, + 0x54, 0x45, 0x71, 0x53, 0x37, 0x34, 0x62, 0x2f, 0x72, 0x54, 0x6b, 0x4d, + 0x66, 0x32, 0x45, 0x43, 0x4d, 0x42, 0x72, 0x74, 0x73, 0x30, 0x6f, 0x46, + 0x6d, 0x56, 0x4a, 0x59, 0x6f, 0x37, 0x37, 0x72, 0x69, 0x4b, 0x51, 0x42, + 0x59, 0x62, 0x52, 0x37, 0x0a, 0x2f, 0x38, 0x66, 0x65, 0x69, 0x73, 0x59, + 0x53, 0x50, 0x48, 0x53, 0x67, 0x37, 0x68, 0x77, 0x61, 0x6f, 0x58, 0x53, + 0x77, 0x55, 0x2b, 0x6a, 0x55, 0x34, 0x67, 0x33, 0x39, 0x77, 0x37, 0x72, + 0x67, 0x76, 0x4f, 0x2b, 0x57, 0x6b, 0x47, 0x75, 0x7a, 0x79, 0x51, 0x3d, + 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x52, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + public: undefined, + publicOrig: undefined + }, + what: 'Unencrypted RSA private key (OpenSSH format)' + }, + { source: [ + ' ', + '-----BEGIN RSA PRIVATE KEY-----', + 'MIIBywIBAAJhANXy0sLFQFgcY6sfyqlrBBNAcow7MWjQS0flUEj8HmrdTX+denZ5', + 
'sArVEBWIoIcCWt0DWGz/mZDS2XPqyy9IbEnhwtZ3bpHmw11J1k61odAwy5sE3P4S', + 'kS/Svl7Wxjcr5wIDAQABAmEAx50pG2QiZpMGQvEEixE32iZ1OXU44kwFEO1Y/N+O', + 'GbKkzf5F9igPIewsd4rkC1MbcVPYTN6a7WlFh2dP1kC5wdPAdVjBf1p7Ii4t4HZt', + 'OokidduJzCmC+TulDCNLFmlBAjEA/6SUxcxRXJV7GHBmu+FIbnR6/drHaBvMkNKZ', + '/tGIq1KfrHIWrhGl2vYqot6hRgoXAjEA1j9VAyuDK2NhQG/WMmf8WV+OMeJQ7vpn', + '3KmtPc/CsWfPNYeQagfwlj0SLGwz8J6xAjEA4qIRkllUN/P9MOlU6nMKN4HkF3EB', + 'HvamIC3Uf+dadifFjqeGG8NhegCKQ+GCsAj5AjAecs3AL5Tqu6r8n2hPrZudkAkD', + 'aeiarhfIsoyedkJzOA+oyeVqTEqS74b/rTkMf2ECMBrts0oFmVJYo77riKQBYbR7', + '/8feisYSPHSg7hwaoXSwU+jU4g39w7rgvO+WkGuzyQ==', + '-----END RSA PRIVATE KEY-----', + ' ' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: new Buffer([ + 0x30, 0x82, 0x01, 0xcb, 0x02, 0x01, 0x00, 0x02, 0x61, 0x00, 0xd5, 0xf2, + 0xd2, 0xc2, 0xc5, 0x40, 0x58, 0x1c, 0x63, 0xab, 0x1f, 0xca, 0xa9, 0x6b, + 0x04, 0x13, 0x40, 0x72, 0x8c, 0x3b, 0x31, 0x68, 0xd0, 0x4b, 0x47, 0xe5, + 0x50, 0x48, 0xfc, 0x1e, 0x6a, 0xdd, 0x4d, 0x7f, 0x9d, 0x7a, 0x76, 0x79, + 0xb0, 0x0a, 0xd5, 0x10, 0x15, 0x88, 0xa0, 0x87, 0x02, 0x5a, 0xdd, 0x03, + 0x58, 0x6c, 0xff, 0x99, 0x90, 0xd2, 0xd9, 0x73, 0xea, 0xcb, 0x2f, 0x48, + 0x6c, 0x49, 0xe1, 0xc2, 0xd6, 0x77, 0x6e, 0x91, 0xe6, 0xc3, 0x5d, 0x49, + 0xd6, 0x4e, 0xb5, 0xa1, 0xd0, 0x30, 0xcb, 0x9b, 0x04, 0xdc, 0xfe, 0x12, + 0x91, 0x2f, 0xd2, 0xbe, 0x5e, 0xd6, 0xc6, 0x37, 0x2b, 0xe7, 0x02, 0x03, + 0x01, 0x00, 0x01, 0x02, 0x61, 0x00, 0xc7, 0x9d, 0x29, 0x1b, 0x64, 0x22, + 0x66, 0x93, 0x06, 0x42, 0xf1, 0x04, 0x8b, 0x11, 0x37, 0xda, 0x26, 0x75, + 0x39, 0x75, 0x38, 0xe2, 0x4c, 0x05, 0x10, 0xed, 0x58, 0xfc, 0xdf, 0x8e, + 0x19, 0xb2, 0xa4, 0xcd, 0xfe, 0x45, 0xf6, 0x28, 0x0f, 0x21, 0xec, 0x2c, + 0x77, 0x8a, 0xe4, 0x0b, 0x53, 0x1b, 0x71, 0x53, 0xd8, 0x4c, 0xde, 0x9a, + 0xed, 0x69, 0x45, 0x87, 0x67, 0x4f, 0xd6, 0x40, 0xb9, 0xc1, 0xd3, 0xc0, + 0x75, 0x58, 0xc1, 0x7f, 0x5a, 0x7b, 0x22, 0x2e, 0x2d, 0xe0, 
0x76, 0x6d, + 0x3a, 0x89, 0x22, 0x75, 0xdb, 0x89, 0xcc, 0x29, 0x82, 0xf9, 0x3b, 0xa5, + 0x0c, 0x23, 0x4b, 0x16, 0x69, 0x41, 0x02, 0x31, 0x00, 0xff, 0xa4, 0x94, + 0xc5, 0xcc, 0x51, 0x5c, 0x95, 0x7b, 0x18, 0x70, 0x66, 0xbb, 0xe1, 0x48, + 0x6e, 0x74, 0x7a, 0xfd, 0xda, 0xc7, 0x68, 0x1b, 0xcc, 0x90, 0xd2, 0x99, + 0xfe, 0xd1, 0x88, 0xab, 0x52, 0x9f, 0xac, 0x72, 0x16, 0xae, 0x11, 0xa5, + 0xda, 0xf6, 0x2a, 0xa2, 0xde, 0xa1, 0x46, 0x0a, 0x17, 0x02, 0x31, 0x00, + 0xd6, 0x3f, 0x55, 0x03, 0x2b, 0x83, 0x2b, 0x63, 0x61, 0x40, 0x6f, 0xd6, + 0x32, 0x67, 0xfc, 0x59, 0x5f, 0x8e, 0x31, 0xe2, 0x50, 0xee, 0xfa, 0x67, + 0xdc, 0xa9, 0xad, 0x3d, 0xcf, 0xc2, 0xb1, 0x67, 0xcf, 0x35, 0x87, 0x90, + 0x6a, 0x07, 0xf0, 0x96, 0x3d, 0x12, 0x2c, 0x6c, 0x33, 0xf0, 0x9e, 0xb1, + 0x02, 0x31, 0x00, 0xe2, 0xa2, 0x11, 0x92, 0x59, 0x54, 0x37, 0xf3, 0xfd, + 0x30, 0xe9, 0x54, 0xea, 0x73, 0x0a, 0x37, 0x81, 0xe4, 0x17, 0x71, 0x01, + 0x1e, 0xf6, 0xa6, 0x20, 0x2d, 0xd4, 0x7f, 0xe7, 0x5a, 0x76, 0x27, 0xc5, + 0x8e, 0xa7, 0x86, 0x1b, 0xc3, 0x61, 0x7a, 0x00, 0x8a, 0x43, 0xe1, 0x82, + 0xb0, 0x08, 0xf9, 0x02, 0x30, 0x1e, 0x72, 0xcd, 0xc0, 0x2f, 0x94, 0xea, + 0xbb, 0xaa, 0xfc, 0x9f, 0x68, 0x4f, 0xad, 0x9b, 0x9d, 0x90, 0x09, 0x03, + 0x69, 0xe8, 0x9a, 0xae, 0x17, 0xc8, 0xb2, 0x8c, 0x9e, 0x76, 0x42, 0x73, + 0x38, 0x0f, 0xa8, 0xc9, 0xe5, 0x6a, 0x4c, 0x4a, 0x92, 0xef, 0x86, 0xff, + 0xad, 0x39, 0x0c, 0x7f, 0x61, 0x02, 0x30, 0x1a, 0xed, 0xb3, 0x4a, 0x05, + 0x99, 0x52, 0x58, 0xa3, 0xbe, 0xeb, 0x88, 0xa4, 0x01, 0x61, 0xb4, 0x7b, + 0xff, 0xc7, 0xde, 0x8a, 0xc6, 0x12, 0x3c, 0x74, 0xa0, 0xee, 0x1c, 0x1a, + 0xa1, 0x74, 0xb0, 0x53, 0xe8, 0xd4, 0xe2, 0x0d, 0xfd, 0xc3, 0xba, 0xe0, + 0xbc, 0xef, 0x96, 0x90, 0x6b, 0xb3, 0xc9, + ]), + privateOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x52, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x42, + 0x79, 0x77, 0x49, 0x42, 0x41, 0x41, 0x4a, 0x68, 0x41, 
0x4e, 0x58, 0x79, + 0x30, 0x73, 0x4c, 0x46, 0x51, 0x46, 0x67, 0x63, 0x59, 0x36, 0x73, 0x66, + 0x79, 0x71, 0x6c, 0x72, 0x42, 0x42, 0x4e, 0x41, 0x63, 0x6f, 0x77, 0x37, + 0x4d, 0x57, 0x6a, 0x51, 0x53, 0x30, 0x66, 0x6c, 0x55, 0x45, 0x6a, 0x38, + 0x48, 0x6d, 0x72, 0x64, 0x54, 0x58, 0x2b, 0x64, 0x65, 0x6e, 0x5a, 0x35, + 0x0a, 0x73, 0x41, 0x72, 0x56, 0x45, 0x42, 0x57, 0x49, 0x6f, 0x49, 0x63, + 0x43, 0x57, 0x74, 0x30, 0x44, 0x57, 0x47, 0x7a, 0x2f, 0x6d, 0x5a, 0x44, + 0x53, 0x32, 0x58, 0x50, 0x71, 0x79, 0x79, 0x39, 0x49, 0x62, 0x45, 0x6e, + 0x68, 0x77, 0x74, 0x5a, 0x33, 0x62, 0x70, 0x48, 0x6d, 0x77, 0x31, 0x31, + 0x4a, 0x31, 0x6b, 0x36, 0x31, 0x6f, 0x64, 0x41, 0x77, 0x79, 0x35, 0x73, + 0x45, 0x33, 0x50, 0x34, 0x53, 0x0a, 0x6b, 0x53, 0x2f, 0x53, 0x76, 0x6c, + 0x37, 0x57, 0x78, 0x6a, 0x63, 0x72, 0x35, 0x77, 0x49, 0x44, 0x41, 0x51, + 0x41, 0x42, 0x41, 0x6d, 0x45, 0x41, 0x78, 0x35, 0x30, 0x70, 0x47, 0x32, + 0x51, 0x69, 0x5a, 0x70, 0x4d, 0x47, 0x51, 0x76, 0x45, 0x45, 0x69, 0x78, + 0x45, 0x33, 0x32, 0x69, 0x5a, 0x31, 0x4f, 0x58, 0x55, 0x34, 0x34, 0x6b, + 0x77, 0x46, 0x45, 0x4f, 0x31, 0x59, 0x2f, 0x4e, 0x2b, 0x4f, 0x0a, 0x47, + 0x62, 0x4b, 0x6b, 0x7a, 0x66, 0x35, 0x46, 0x39, 0x69, 0x67, 0x50, 0x49, + 0x65, 0x77, 0x73, 0x64, 0x34, 0x72, 0x6b, 0x43, 0x31, 0x4d, 0x62, 0x63, + 0x56, 0x50, 0x59, 0x54, 0x4e, 0x36, 0x61, 0x37, 0x57, 0x6c, 0x46, 0x68, + 0x32, 0x64, 0x50, 0x31, 0x6b, 0x43, 0x35, 0x77, 0x64, 0x50, 0x41, 0x64, + 0x56, 0x6a, 0x42, 0x66, 0x31, 0x70, 0x37, 0x49, 0x69, 0x34, 0x74, 0x34, + 0x48, 0x5a, 0x74, 0x0a, 0x4f, 0x6f, 0x6b, 0x69, 0x64, 0x64, 0x75, 0x4a, + 0x7a, 0x43, 0x6d, 0x43, 0x2b, 0x54, 0x75, 0x6c, 0x44, 0x43, 0x4e, 0x4c, + 0x46, 0x6d, 0x6c, 0x42, 0x41, 0x6a, 0x45, 0x41, 0x2f, 0x36, 0x53, 0x55, + 0x78, 0x63, 0x78, 0x52, 0x58, 0x4a, 0x56, 0x37, 0x47, 0x48, 0x42, 0x6d, + 0x75, 0x2b, 0x46, 0x49, 0x62, 0x6e, 0x52, 0x36, 0x2f, 0x64, 0x72, 0x48, + 0x61, 0x42, 0x76, 0x4d, 0x6b, 0x4e, 0x4b, 0x5a, 0x0a, 0x2f, 0x74, 0x47, + 0x49, 0x71, 0x31, 0x4b, 0x66, 0x72, 0x48, 0x49, 0x57, 
0x72, 0x68, 0x47, + 0x6c, 0x32, 0x76, 0x59, 0x71, 0x6f, 0x74, 0x36, 0x68, 0x52, 0x67, 0x6f, + 0x58, 0x41, 0x6a, 0x45, 0x41, 0x31, 0x6a, 0x39, 0x56, 0x41, 0x79, 0x75, + 0x44, 0x4b, 0x32, 0x4e, 0x68, 0x51, 0x47, 0x2f, 0x57, 0x4d, 0x6d, 0x66, + 0x38, 0x57, 0x56, 0x2b, 0x4f, 0x4d, 0x65, 0x4a, 0x51, 0x37, 0x76, 0x70, + 0x6e, 0x0a, 0x33, 0x4b, 0x6d, 0x74, 0x50, 0x63, 0x2f, 0x43, 0x73, 0x57, + 0x66, 0x50, 0x4e, 0x59, 0x65, 0x51, 0x61, 0x67, 0x66, 0x77, 0x6c, 0x6a, + 0x30, 0x53, 0x4c, 0x47, 0x77, 0x7a, 0x38, 0x4a, 0x36, 0x78, 0x41, 0x6a, + 0x45, 0x41, 0x34, 0x71, 0x49, 0x52, 0x6b, 0x6c, 0x6c, 0x55, 0x4e, 0x2f, + 0x50, 0x39, 0x4d, 0x4f, 0x6c, 0x55, 0x36, 0x6e, 0x4d, 0x4b, 0x4e, 0x34, + 0x48, 0x6b, 0x46, 0x33, 0x45, 0x42, 0x0a, 0x48, 0x76, 0x61, 0x6d, 0x49, + 0x43, 0x33, 0x55, 0x66, 0x2b, 0x64, 0x61, 0x64, 0x69, 0x66, 0x46, 0x6a, + 0x71, 0x65, 0x47, 0x47, 0x38, 0x4e, 0x68, 0x65, 0x67, 0x43, 0x4b, 0x51, + 0x2b, 0x47, 0x43, 0x73, 0x41, 0x6a, 0x35, 0x41, 0x6a, 0x41, 0x65, 0x63, + 0x73, 0x33, 0x41, 0x4c, 0x35, 0x54, 0x71, 0x75, 0x36, 0x72, 0x38, 0x6e, + 0x32, 0x68, 0x50, 0x72, 0x5a, 0x75, 0x64, 0x6b, 0x41, 0x6b, 0x44, 0x0a, + 0x61, 0x65, 0x69, 0x61, 0x72, 0x68, 0x66, 0x49, 0x73, 0x6f, 0x79, 0x65, + 0x64, 0x6b, 0x4a, 0x7a, 0x4f, 0x41, 0x2b, 0x6f, 0x79, 0x65, 0x56, 0x71, + 0x54, 0x45, 0x71, 0x53, 0x37, 0x34, 0x62, 0x2f, 0x72, 0x54, 0x6b, 0x4d, + 0x66, 0x32, 0x45, 0x43, 0x4d, 0x42, 0x72, 0x74, 0x73, 0x30, 0x6f, 0x46, + 0x6d, 0x56, 0x4a, 0x59, 0x6f, 0x37, 0x37, 0x72, 0x69, 0x4b, 0x51, 0x42, + 0x59, 0x62, 0x52, 0x37, 0x0a, 0x2f, 0x38, 0x66, 0x65, 0x69, 0x73, 0x59, + 0x53, 0x50, 0x48, 0x53, 0x67, 0x37, 0x68, 0x77, 0x61, 0x6f, 0x58, 0x53, + 0x77, 0x55, 0x2b, 0x6a, 0x55, 0x34, 0x67, 0x33, 0x39, 0x77, 0x37, 0x72, + 0x67, 0x76, 0x4f, 0x2b, 0x57, 0x6b, 0x47, 0x75, 0x7a, 0x79, 0x51, 0x3d, + 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x52, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + 
public: undefined, + publicOrig: undefined + }, + what: 'Unencrypted RSA private key (OpenSSH format) with surrounding whitespace' + }, + { source: [ + '-----BEGIN RSA PRIVATE KEY-----', + 'Proc-Type: 4,ENCRYPTED', + 'DEK-Info: AES-128-CBC,E9F7397F94E585BCE4C739D5C5226A2D', + '', + 'mshPNeXq8XAh79N3F1vKUOCsWoTofhz/Kpt/ddYdgnGG9L5wGDoveJQtpsa5u+A4', + 'hCkAWA7KRCUKbOxZd8lG00cm7HILN7eHCKSoSkyA+X1jN7v1tSGPjiN2RSZFHCOI', + '8jEF0fyTeg9GZzFaiyyD5p1niy7uCssEeo8AzbIBX4yLDR18mmBiMhn0FxVu/YFn', + 'rfkm+ciE4WhkqdCUOkKiqIux7RI6O9ttUuZlhwVvoKZqujMeNrFppOC9kF5hAi6Y', + 'oHvrweE8QmimEUIMAwdDrI1/jglFw2x2K5sRxPIIFJC1aHR8nXLTuO1g2/g6aqls', + 'jGkwlQU4cU5pnkxkLUVCSHi6A6nwbdjQTIIDofpcckQVFHZaBCJx9UF691NR05Dc', + 'GthFFfNGXWU6vbwKia/LDX8R98ay4Ci2zoDXxe7QBnvnWbs2m69HPh6opmo5gKL+', + 'y65Sf6MAi5udIJLVfgXQBgRyX3glS7/rH8YbrrNY9Iop1KcSWXAIC4zlCoO3tBJB', + '9464YBD/R+qOvVMcBGxTjabudi00ougeTgsS7idDTmfc4HYIls0E7yWqzgspsws5', + 'w7K4crSgm+u3oypViUSre2qSvAHTC5gXth8LVCnDZuA=', + '-----END RSA PRIVATE KEY-----' + ].join('\n'), + expected: { + fulltype: undefined, + type: 'rsa', + curve: undefined, + extra: [ 'E9F7397F94E585BCE4C739D5C5226A2D' ], + comment: undefined, + encryption: 'aes-128-cbc', + private: new Buffer([ + 0x9a, 0xc8, 0x4f, 0x35, 0xe5, 0xea, 0xf1, 0x70, 0x21, 0xef, 0xd3, 0x77, + 0x17, 0x5b, 0xca, 0x50, 0xe0, 0xac, 0x5a, 0x84, 0xe8, 0x7e, 0x1c, 0xff, + 0x2a, 0x9b, 0x7f, 0x75, 0xd6, 0x1d, 0x82, 0x71, 0x86, 0xf4, 0xbe, 0x70, + 0x18, 0x3a, 0x2f, 0x78, 0x94, 0x2d, 0xa6, 0xc6, 0xb9, 0xbb, 0xe0, 0x38, + 0x84, 0x29, 0x00, 0x58, 0x0e, 0xca, 0x44, 0x25, 0x0a, 0x6c, 0xec, 0x59, + 0x77, 0xc9, 0x46, 0xd3, 0x47, 0x26, 0xec, 0x72, 0x0b, 0x37, 0xb7, 0x87, + 0x08, 0xa4, 0xa8, 0x4a, 0x4c, 0x80, 0xf9, 0x7d, 0x63, 0x37, 0xbb, 0xf5, + 0xb5, 0x21, 0x8f, 0x8e, 0x23, 0x76, 0x45, 0x26, 0x45, 0x1c, 0x23, 0x88, + 0xf2, 0x31, 0x05, 0xd1, 0xfc, 0x93, 0x7a, 0x0f, 0x46, 0x67, 0x31, 0x5a, + 0x8b, 0x2c, 0x83, 0xe6, 0x9d, 0x67, 0x8b, 0x2e, 0xee, 0x0a, 0xcb, 0x04, + 0x7a, 0x8f, 0x00, 0xcd, 0xb2, 0x01, 0x5f, 0x8c, 
0x8b, 0x0d, 0x1d, 0x7c, + 0x9a, 0x60, 0x62, 0x32, 0x19, 0xf4, 0x17, 0x15, 0x6e, 0xfd, 0x81, 0x67, + 0xad, 0xf9, 0x26, 0xf9, 0xc8, 0x84, 0xe1, 0x68, 0x64, 0xa9, 0xd0, 0x94, + 0x3a, 0x42, 0xa2, 0xa8, 0x8b, 0xb1, 0xed, 0x12, 0x3a, 0x3b, 0xdb, 0x6d, + 0x52, 0xe6, 0x65, 0x87, 0x05, 0x6f, 0xa0, 0xa6, 0x6a, 0xba, 0x33, 0x1e, + 0x36, 0xb1, 0x69, 0xa4, 0xe0, 0xbd, 0x90, 0x5e, 0x61, 0x02, 0x2e, 0x98, + 0xa0, 0x7b, 0xeb, 0xc1, 0xe1, 0x3c, 0x42, 0x68, 0xa6, 0x11, 0x42, 0x0c, + 0x03, 0x07, 0x43, 0xac, 0x8d, 0x7f, 0x8e, 0x09, 0x45, 0xc3, 0x6c, 0x76, + 0x2b, 0x9b, 0x11, 0xc4, 0xf2, 0x08, 0x14, 0x90, 0xb5, 0x68, 0x74, 0x7c, + 0x9d, 0x72, 0xd3, 0xb8, 0xed, 0x60, 0xdb, 0xf8, 0x3a, 0x6a, 0xa9, 0x6c, + 0x8c, 0x69, 0x30, 0x95, 0x05, 0x38, 0x71, 0x4e, 0x69, 0x9e, 0x4c, 0x64, + 0x2d, 0x45, 0x42, 0x48, 0x78, 0xba, 0x03, 0xa9, 0xf0, 0x6d, 0xd8, 0xd0, + 0x4c, 0x82, 0x03, 0xa1, 0xfa, 0x5c, 0x72, 0x44, 0x15, 0x14, 0x76, 0x5a, + 0x04, 0x22, 0x71, 0xf5, 0x41, 0x7a, 0xf7, 0x53, 0x51, 0xd3, 0x90, 0xdc, + 0x1a, 0xd8, 0x45, 0x15, 0xf3, 0x46, 0x5d, 0x65, 0x3a, 0xbd, 0xbc, 0x0a, + 0x89, 0xaf, 0xcb, 0x0d, 0x7f, 0x11, 0xf7, 0xc6, 0xb2, 0xe0, 0x28, 0xb6, + 0xce, 0x80, 0xd7, 0xc5, 0xee, 0xd0, 0x06, 0x7b, 0xe7, 0x59, 0xbb, 0x36, + 0x9b, 0xaf, 0x47, 0x3e, 0x1e, 0xa8, 0xa6, 0x6a, 0x39, 0x80, 0xa2, 0xfe, + 0xcb, 0xae, 0x52, 0x7f, 0xa3, 0x00, 0x8b, 0x9b, 0x9d, 0x20, 0x92, 0xd5, + 0x7e, 0x05, 0xd0, 0x06, 0x04, 0x72, 0x5f, 0x78, 0x25, 0x4b, 0xbf, 0xeb, + 0x1f, 0xc6, 0x1b, 0xae, 0xb3, 0x58, 0xf4, 0x8a, 0x29, 0xd4, 0xa7, 0x12, + 0x59, 0x70, 0x08, 0x0b, 0x8c, 0xe5, 0x0a, 0x83, 0xb7, 0xb4, 0x12, 0x41, + 0xf7, 0x8e, 0xb8, 0x60, 0x10, 0xff, 0x47, 0xea, 0x8e, 0xbd, 0x53, 0x1c, + 0x04, 0x6c, 0x53, 0x8d, 0xa6, 0xee, 0x76, 0x2d, 0x34, 0xa2, 0xe8, 0x1e, + 0x4e, 0x0b, 0x12, 0xee, 0x27, 0x43, 0x4e, 0x67, 0xdc, 0xe0, 0x76, 0x08, + 0x96, 0xcd, 0x04, 0xef, 0x25, 0xaa, 0xce, 0x0b, 0x29, 0xb3, 0x0b, 0x39, + 0xc3, 0xb2, 0xb8, 0x72, 0xb4, 0xa0, 0x9b, 0xeb, 0xb7, 0xa3, 0x2a, 0x55, + 0x89, 0x44, 0xab, 0x7b, 0x6a, 0x92, 0xbc, 0x01, 
0xd3, 0x0b, 0x98, 0x17, + 0xb6, 0x1f, 0x0b, 0x54, 0x29, 0xc3, 0x66, 0xe0, + ]), + privateOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x52, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x50, 0x72, 0x6f, 0x63, + 0x2d, 0x54, 0x79, 0x70, 0x65, 0x3a, 0x20, 0x34, 0x2c, 0x45, 0x4e, 0x43, + 0x52, 0x59, 0x50, 0x54, 0x45, 0x44, 0x0a, 0x44, 0x45, 0x4b, 0x2d, 0x49, + 0x6e, 0x66, 0x6f, 0x3a, 0x20, 0x41, 0x45, 0x53, 0x2d, 0x31, 0x32, 0x38, + 0x2d, 0x43, 0x42, 0x43, 0x2c, 0x45, 0x39, 0x46, 0x37, 0x33, 0x39, 0x37, + 0x46, 0x39, 0x34, 0x45, 0x35, 0x38, 0x35, 0x42, 0x43, 0x45, 0x34, 0x43, + 0x37, 0x33, 0x39, 0x44, 0x35, 0x43, 0x35, 0x32, 0x32, 0x36, 0x41, 0x32, + 0x44, 0x0a, 0x0a, 0x6d, 0x73, 0x68, 0x50, 0x4e, 0x65, 0x58, 0x71, 0x38, + 0x58, 0x41, 0x68, 0x37, 0x39, 0x4e, 0x33, 0x46, 0x31, 0x76, 0x4b, 0x55, + 0x4f, 0x43, 0x73, 0x57, 0x6f, 0x54, 0x6f, 0x66, 0x68, 0x7a, 0x2f, 0x4b, + 0x70, 0x74, 0x2f, 0x64, 0x64, 0x59, 0x64, 0x67, 0x6e, 0x47, 0x47, 0x39, + 0x4c, 0x35, 0x77, 0x47, 0x44, 0x6f, 0x76, 0x65, 0x4a, 0x51, 0x74, 0x70, + 0x73, 0x61, 0x35, 0x75, 0x2b, 0x41, 0x34, 0x0a, 0x68, 0x43, 0x6b, 0x41, + 0x57, 0x41, 0x37, 0x4b, 0x52, 0x43, 0x55, 0x4b, 0x62, 0x4f, 0x78, 0x5a, + 0x64, 0x38, 0x6c, 0x47, 0x30, 0x30, 0x63, 0x6d, 0x37, 0x48, 0x49, 0x4c, + 0x4e, 0x37, 0x65, 0x48, 0x43, 0x4b, 0x53, 0x6f, 0x53, 0x6b, 0x79, 0x41, + 0x2b, 0x58, 0x31, 0x6a, 0x4e, 0x37, 0x76, 0x31, 0x74, 0x53, 0x47, 0x50, + 0x6a, 0x69, 0x4e, 0x32, 0x52, 0x53, 0x5a, 0x46, 0x48, 0x43, 0x4f, 0x49, + 0x0a, 0x38, 0x6a, 0x45, 0x46, 0x30, 0x66, 0x79, 0x54, 0x65, 0x67, 0x39, + 0x47, 0x5a, 0x7a, 0x46, 0x61, 0x69, 0x79, 0x79, 0x44, 0x35, 0x70, 0x31, + 0x6e, 0x69, 0x79, 0x37, 0x75, 0x43, 0x73, 0x73, 0x45, 0x65, 0x6f, 0x38, + 0x41, 0x7a, 0x62, 0x49, 0x42, 0x58, 0x34, 0x79, 0x4c, 0x44, 0x52, 0x31, + 0x38, 0x6d, 0x6d, 0x42, 0x69, 0x4d, 0x68, 0x6e, 0x30, 0x46, 0x78, 0x56, + 0x75, 0x2f, 0x59, 0x46, 0x6e, 0x0a, 
0x72, 0x66, 0x6b, 0x6d, 0x2b, 0x63, + 0x69, 0x45, 0x34, 0x57, 0x68, 0x6b, 0x71, 0x64, 0x43, 0x55, 0x4f, 0x6b, + 0x4b, 0x69, 0x71, 0x49, 0x75, 0x78, 0x37, 0x52, 0x49, 0x36, 0x4f, 0x39, + 0x74, 0x74, 0x55, 0x75, 0x5a, 0x6c, 0x68, 0x77, 0x56, 0x76, 0x6f, 0x4b, + 0x5a, 0x71, 0x75, 0x6a, 0x4d, 0x65, 0x4e, 0x72, 0x46, 0x70, 0x70, 0x4f, + 0x43, 0x39, 0x6b, 0x46, 0x35, 0x68, 0x41, 0x69, 0x36, 0x59, 0x0a, 0x6f, + 0x48, 0x76, 0x72, 0x77, 0x65, 0x45, 0x38, 0x51, 0x6d, 0x69, 0x6d, 0x45, + 0x55, 0x49, 0x4d, 0x41, 0x77, 0x64, 0x44, 0x72, 0x49, 0x31, 0x2f, 0x6a, + 0x67, 0x6c, 0x46, 0x77, 0x32, 0x78, 0x32, 0x4b, 0x35, 0x73, 0x52, 0x78, + 0x50, 0x49, 0x49, 0x46, 0x4a, 0x43, 0x31, 0x61, 0x48, 0x52, 0x38, 0x6e, + 0x58, 0x4c, 0x54, 0x75, 0x4f, 0x31, 0x67, 0x32, 0x2f, 0x67, 0x36, 0x61, + 0x71, 0x6c, 0x73, 0x0a, 0x6a, 0x47, 0x6b, 0x77, 0x6c, 0x51, 0x55, 0x34, + 0x63, 0x55, 0x35, 0x70, 0x6e, 0x6b, 0x78, 0x6b, 0x4c, 0x55, 0x56, 0x43, + 0x53, 0x48, 0x69, 0x36, 0x41, 0x36, 0x6e, 0x77, 0x62, 0x64, 0x6a, 0x51, + 0x54, 0x49, 0x49, 0x44, 0x6f, 0x66, 0x70, 0x63, 0x63, 0x6b, 0x51, 0x56, + 0x46, 0x48, 0x5a, 0x61, 0x42, 0x43, 0x4a, 0x78, 0x39, 0x55, 0x46, 0x36, + 0x39, 0x31, 0x4e, 0x52, 0x30, 0x35, 0x44, 0x63, 0x0a, 0x47, 0x74, 0x68, + 0x46, 0x46, 0x66, 0x4e, 0x47, 0x58, 0x57, 0x55, 0x36, 0x76, 0x62, 0x77, + 0x4b, 0x69, 0x61, 0x2f, 0x4c, 0x44, 0x58, 0x38, 0x52, 0x39, 0x38, 0x61, + 0x79, 0x34, 0x43, 0x69, 0x32, 0x7a, 0x6f, 0x44, 0x58, 0x78, 0x65, 0x37, + 0x51, 0x42, 0x6e, 0x76, 0x6e, 0x57, 0x62, 0x73, 0x32, 0x6d, 0x36, 0x39, + 0x48, 0x50, 0x68, 0x36, 0x6f, 0x70, 0x6d, 0x6f, 0x35, 0x67, 0x4b, 0x4c, + 0x2b, 0x0a, 0x79, 0x36, 0x35, 0x53, 0x66, 0x36, 0x4d, 0x41, 0x69, 0x35, + 0x75, 0x64, 0x49, 0x4a, 0x4c, 0x56, 0x66, 0x67, 0x58, 0x51, 0x42, 0x67, + 0x52, 0x79, 0x58, 0x33, 0x67, 0x6c, 0x53, 0x37, 0x2f, 0x72, 0x48, 0x38, + 0x59, 0x62, 0x72, 0x72, 0x4e, 0x59, 0x39, 0x49, 0x6f, 0x70, 0x31, 0x4b, + 0x63, 0x53, 0x57, 0x58, 0x41, 0x49, 0x43, 0x34, 0x7a, 0x6c, 0x43, 0x6f, + 0x4f, 0x33, 0x74, 0x42, 0x4a, 0x42, 
0x0a, 0x39, 0x34, 0x36, 0x34, 0x59, + 0x42, 0x44, 0x2f, 0x52, 0x2b, 0x71, 0x4f, 0x76, 0x56, 0x4d, 0x63, 0x42, + 0x47, 0x78, 0x54, 0x6a, 0x61, 0x62, 0x75, 0x64, 0x69, 0x30, 0x30, 0x6f, + 0x75, 0x67, 0x65, 0x54, 0x67, 0x73, 0x53, 0x37, 0x69, 0x64, 0x44, 0x54, + 0x6d, 0x66, 0x63, 0x34, 0x48, 0x59, 0x49, 0x6c, 0x73, 0x30, 0x45, 0x37, + 0x79, 0x57, 0x71, 0x7a, 0x67, 0x73, 0x70, 0x73, 0x77, 0x73, 0x35, 0x0a, + 0x77, 0x37, 0x4b, 0x34, 0x63, 0x72, 0x53, 0x67, 0x6d, 0x2b, 0x75, 0x33, + 0x6f, 0x79, 0x70, 0x56, 0x69, 0x55, 0x53, 0x72, 0x65, 0x32, 0x71, 0x53, + 0x76, 0x41, 0x48, 0x54, 0x43, 0x35, 0x67, 0x58, 0x74, 0x68, 0x38, 0x4c, + 0x56, 0x43, 0x6e, 0x44, 0x5a, 0x75, 0x41, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, + 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x52, 0x53, 0x41, 0x20, 0x50, 0x52, + 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, + 0x2d, 0x2d, + ]), + public: undefined, + publicOrig: undefined + }, + what: 'Encrypted RSA private key (OpenSSH format)' + }, + { source: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRLlnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0fxoLSF3lxQFJ+32/rS0=', + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x72, 0x73, 0x61, 0x00, + 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, 0xe5, + 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, 0xb5, + 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, 0x5f, + 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 0xf7, 0x33, + 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, 0xb1, + 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, 0xde, + 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, 0x02, + 0x44, 0x53, 0xfe, 0x50, 0xf4, 
0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, 0x0b, + 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x73, 0x73, 0x68, 0x2d, 0x72, 0x73, 0x61, 0x20, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, 0x41, + 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, 0x41, + 0x59, 0x51, 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, 0x67, 0x78, + 0x77, 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, 0x41, 0x72, + 0x62, 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, 0x6c, 0x66, + 0x2b, 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x6c, 0x6e, 0x52, 0x35, 0x4f, 0x36, + 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, 0x2b, 0x65, 0x4f, + 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, 0x7a, 0x57, 0x6f, + 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, 0x39, 0x72, 0x65, + 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, 0x46, 0x76, 0x78, + 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, 0x50, 0x52, 0x36, + 0x68, 0x51, 0x58, 0x30, 0x66, 0x78, 0x6f, 0x4c, 0x53, 0x46, 0x33, 0x6c, + 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, 0x72, 0x53, 0x30, 0x3d, + ]) + }, + what: 'RSA public key (OpenSSH format)' + }, + { source: 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAYQD3txsEf0HAKElAFUvIXzsM98gfPlIbG4/GlqbYYBulkHu6z0laOdoT14Zx2M+3q+9RjhTZjHxyMfePdcgNK9z98V6tOz5bIQhtMS8tl1Tnw5qZByGqpqOKf665ev62LaM= testing-ssh2-from-node.js', + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: 'testing-ssh2-from-node.js', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x72, 0x73, 0x61, 0x00, + 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, 0xf7, + 0xb7, 0x1b, 0x04, 0x7f, 0x41, 0xc0, 0x28, 0x49, 0x40, 0x15, 0x4b, 0xc8, + 0x5f, 0x3b, 0x0c, 0xf7, 0xc8, 0x1f, 0x3e, 0x52, 0x1b, 0x1b, 0x8f, 0xc6, + 0x96, 0xa6, 0xd8, 0x60, 
0x1b, 0xa5, 0x90, 0x7b, 0xba, 0xcf, 0x49, 0x5a, + 0x39, 0xda, 0x13, 0xd7, 0x86, 0x71, 0xd8, 0xcf, 0xb7, 0xab, 0xef, 0x51, + 0x8e, 0x14, 0xd9, 0x8c, 0x7c, 0x72, 0x31, 0xf7, 0x8f, 0x75, 0xc8, 0x0d, + 0x2b, 0xdc, 0xfd, 0xf1, 0x5e, 0xad, 0x3b, 0x3e, 0x5b, 0x21, 0x08, 0x6d, + 0x31, 0x2f, 0x2d, 0x97, 0x54, 0xe7, 0xc3, 0x9a, 0x99, 0x07, 0x21, 0xaa, + 0xa6, 0xa3, 0x8a, 0x7f, 0xae, 0xb9, 0x7a, 0xfe, 0xb6, 0x2d, 0xa3, + ]), + publicOrig: new Buffer([ + 0x73, 0x73, 0x68, 0x2d, 0x72, 0x73, 0x61, 0x20, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, 0x41, + 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, 0x41, + 0x59, 0x51, 0x44, 0x33, 0x74, 0x78, 0x73, 0x45, 0x66, 0x30, 0x48, 0x41, + 0x4b, 0x45, 0x6c, 0x41, 0x46, 0x55, 0x76, 0x49, 0x58, 0x7a, 0x73, 0x4d, + 0x39, 0x38, 0x67, 0x66, 0x50, 0x6c, 0x49, 0x62, 0x47, 0x34, 0x2f, 0x47, + 0x6c, 0x71, 0x62, 0x59, 0x59, 0x42, 0x75, 0x6c, 0x6b, 0x48, 0x75, 0x36, + 0x7a, 0x30, 0x6c, 0x61, 0x4f, 0x64, 0x6f, 0x54, 0x31, 0x34, 0x5a, 0x78, + 0x32, 0x4d, 0x2b, 0x33, 0x71, 0x2b, 0x39, 0x52, 0x6a, 0x68, 0x54, 0x5a, + 0x6a, 0x48, 0x78, 0x79, 0x4d, 0x66, 0x65, 0x50, 0x64, 0x63, 0x67, 0x4e, + 0x4b, 0x39, 0x7a, 0x39, 0x38, 0x56, 0x36, 0x74, 0x4f, 0x7a, 0x35, 0x62, + 0x49, 0x51, 0x68, 0x74, 0x4d, 0x53, 0x38, 0x74, 0x6c, 0x31, 0x54, 0x6e, + 0x77, 0x35, 0x71, 0x5a, 0x42, 0x79, 0x47, 0x71, 0x70, 0x71, 0x4f, 0x4b, + 0x66, 0x36, 0x36, 0x35, 0x65, 0x76, 0x36, 0x32, 0x4c, 0x61, 0x4d, 0x3d, + 0x20, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x2d, 0x73, 0x73, 0x68, + 0x32, 0x2d, 0x66, 0x72, 0x6f, 0x6d, 0x2d, 0x6e, 0x6f, 0x64, 0x65, 0x2e, + 0x6a, 0x73, + ]) + }, + what: 'RSA public key with comment (OpenSSH format)' + }, + { source: [ + '-----BEGIN DSA PRIVATE KEY-----', + 'MIIBuwIBAAKBgQD7v1kR31D2NhZIzOjJL1hPzvw79K3eWZqQEgqKmeB+P9MhOx51', + 'MOZrvs3hZoYqmsNxT/Y29EwQ+o+SXpTYjJhw/s2vR0AeJBj32l8weD804+T+S8yr', + 'MJBdgsc5AbV2XYKnsNzl65kAEOLPCobUNysKqYnJ8naYCYL3jjwGhW36iwIVANqh', + 
'gZgvnVpVXyYx1GlceA5/8mI1AoGBAOyjuQhGoOW4hILR02WrqwCEPqhRyMp87dMU', + '9z0PQKKrACmiQpEUawvtR4aMB7Xzy+f6MRLwdonQISzswopa7Gwl3CYLiOdKFFIQ', + 'zDdymwGovTjN2fQm8v/UHdafZBPImGBDzf+iykNBhZc7UP1rciMzKOnT3BTF/al8', + 'hhBy0bjsAoGAYrykryPYWUGArKS6NO9Ijtwc7SbbNXZCuBcl6hc1Hdr1UEEKQcsW', + '5fajF5Ut9ZLwcggsWSKZtZk9NA7xybmObuDEU7oiFFNRAbBkam/v/3a3bwvSHGKg', + 'q15cvC313zg1ii9NXylvBIoIS8EWfekl1LMvHC7NdJ41wPYhhmPrUk4CFHJjavJH', + 'kb6tDtemyiAq3N9aQj/D', + '-----END DSA PRIVATE KEY-----' + ].join('\n'), + expected: { + fulltype: 'ssh-dss', + type: 'dss', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: new Buffer([ + 0x30, 0x82, 0x01, 0xbb, 0x02, 0x01, 0x00, 0x02, 0x81, 0x81, 0x00, 0xfb, + 0xbf, 0x59, 0x11, 0xdf, 0x50, 0xf6, 0x36, 0x16, 0x48, 0xcc, 0xe8, 0xc9, + 0x2f, 0x58, 0x4f, 0xce, 0xfc, 0x3b, 0xf4, 0xad, 0xde, 0x59, 0x9a, 0x90, + 0x12, 0x0a, 0x8a, 0x99, 0xe0, 0x7e, 0x3f, 0xd3, 0x21, 0x3b, 0x1e, 0x75, + 0x30, 0xe6, 0x6b, 0xbe, 0xcd, 0xe1, 0x66, 0x86, 0x2a, 0x9a, 0xc3, 0x71, + 0x4f, 0xf6, 0x36, 0xf4, 0x4c, 0x10, 0xfa, 0x8f, 0x92, 0x5e, 0x94, 0xd8, + 0x8c, 0x98, 0x70, 0xfe, 0xcd, 0xaf, 0x47, 0x40, 0x1e, 0x24, 0x18, 0xf7, + 0xda, 0x5f, 0x30, 0x78, 0x3f, 0x34, 0xe3, 0xe4, 0xfe, 0x4b, 0xcc, 0xab, + 0x30, 0x90, 0x5d, 0x82, 0xc7, 0x39, 0x01, 0xb5, 0x76, 0x5d, 0x82, 0xa7, + 0xb0, 0xdc, 0xe5, 0xeb, 0x99, 0x00, 0x10, 0xe2, 0xcf, 0x0a, 0x86, 0xd4, + 0x37, 0x2b, 0x0a, 0xa9, 0x89, 0xc9, 0xf2, 0x76, 0x98, 0x09, 0x82, 0xf7, + 0x8e, 0x3c, 0x06, 0x85, 0x6d, 0xfa, 0x8b, 0x02, 0x15, 0x00, 0xda, 0xa1, + 0x81, 0x98, 0x2f, 0x9d, 0x5a, 0x55, 0x5f, 0x26, 0x31, 0xd4, 0x69, 0x5c, + 0x78, 0x0e, 0x7f, 0xf2, 0x62, 0x35, 0x02, 0x81, 0x81, 0x00, 0xec, 0xa3, + 0xb9, 0x08, 0x46, 0xa0, 0xe5, 0xb8, 0x84, 0x82, 0xd1, 0xd3, 0x65, 0xab, + 0xab, 0x00, 0x84, 0x3e, 0xa8, 0x51, 0xc8, 0xca, 0x7c, 0xed, 0xd3, 0x14, + 0xf7, 0x3d, 0x0f, 0x40, 0xa2, 0xab, 0x00, 0x29, 0xa2, 0x42, 0x91, 0x14, + 0x6b, 0x0b, 0xed, 0x47, 0x86, 0x8c, 0x07, 0xb5, 0xf3, 0xcb, 0xe7, 0xfa, + 0x31, 
0x12, 0xf0, 0x76, 0x89, 0xd0, 0x21, 0x2c, 0xec, 0xc2, 0x8a, 0x5a, + 0xec, 0x6c, 0x25, 0xdc, 0x26, 0x0b, 0x88, 0xe7, 0x4a, 0x14, 0x52, 0x10, + 0xcc, 0x37, 0x72, 0x9b, 0x01, 0xa8, 0xbd, 0x38, 0xcd, 0xd9, 0xf4, 0x26, + 0xf2, 0xff, 0xd4, 0x1d, 0xd6, 0x9f, 0x64, 0x13, 0xc8, 0x98, 0x60, 0x43, + 0xcd, 0xff, 0xa2, 0xca, 0x43, 0x41, 0x85, 0x97, 0x3b, 0x50, 0xfd, 0x6b, + 0x72, 0x23, 0x33, 0x28, 0xe9, 0xd3, 0xdc, 0x14, 0xc5, 0xfd, 0xa9, 0x7c, + 0x86, 0x10, 0x72, 0xd1, 0xb8, 0xec, 0x02, 0x81, 0x80, 0x62, 0xbc, 0xa4, + 0xaf, 0x23, 0xd8, 0x59, 0x41, 0x80, 0xac, 0xa4, 0xba, 0x34, 0xef, 0x48, + 0x8e, 0xdc, 0x1c, 0xed, 0x26, 0xdb, 0x35, 0x76, 0x42, 0xb8, 0x17, 0x25, + 0xea, 0x17, 0x35, 0x1d, 0xda, 0xf5, 0x50, 0x41, 0x0a, 0x41, 0xcb, 0x16, + 0xe5, 0xf6, 0xa3, 0x17, 0x95, 0x2d, 0xf5, 0x92, 0xf0, 0x72, 0x08, 0x2c, + 0x59, 0x22, 0x99, 0xb5, 0x99, 0x3d, 0x34, 0x0e, 0xf1, 0xc9, 0xb9, 0x8e, + 0x6e, 0xe0, 0xc4, 0x53, 0xba, 0x22, 0x14, 0x53, 0x51, 0x01, 0xb0, 0x64, + 0x6a, 0x6f, 0xef, 0xff, 0x76, 0xb7, 0x6f, 0x0b, 0xd2, 0x1c, 0x62, 0xa0, + 0xab, 0x5e, 0x5c, 0xbc, 0x2d, 0xf5, 0xdf, 0x38, 0x35, 0x8a, 0x2f, 0x4d, + 0x5f, 0x29, 0x6f, 0x04, 0x8a, 0x08, 0x4b, 0xc1, 0x16, 0x7d, 0xe9, 0x25, + 0xd4, 0xb3, 0x2f, 0x1c, 0x2e, 0xcd, 0x74, 0x9e, 0x35, 0xc0, 0xf6, 0x21, + 0x86, 0x63, 0xeb, 0x52, 0x4e, 0x02, 0x14, 0x72, 0x63, 0x6a, 0xf2, 0x47, + 0x91, 0xbe, 0xad, 0x0e, 0xd7, 0xa6, 0xca, 0x20, 0x2a, 0xdc, 0xdf, 0x5a, + 0x42, 0x3f, 0xc3, + ]), + privateOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x44, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x42, + 0x75, 0x77, 0x49, 0x42, 0x41, 0x41, 0x4b, 0x42, 0x67, 0x51, 0x44, 0x37, + 0x76, 0x31, 0x6b, 0x52, 0x33, 0x31, 0x44, 0x32, 0x4e, 0x68, 0x5a, 0x49, + 0x7a, 0x4f, 0x6a, 0x4a, 0x4c, 0x31, 0x68, 0x50, 0x7a, 0x76, 0x77, 0x37, + 0x39, 0x4b, 0x33, 0x65, 0x57, 0x5a, 0x71, 0x51, 0x45, 0x67, 0x71, 0x4b, + 0x6d, 0x65, 0x42, 0x2b, 
0x50, 0x39, 0x4d, 0x68, 0x4f, 0x78, 0x35, 0x31, + 0x0a, 0x4d, 0x4f, 0x5a, 0x72, 0x76, 0x73, 0x33, 0x68, 0x5a, 0x6f, 0x59, + 0x71, 0x6d, 0x73, 0x4e, 0x78, 0x54, 0x2f, 0x59, 0x32, 0x39, 0x45, 0x77, + 0x51, 0x2b, 0x6f, 0x2b, 0x53, 0x58, 0x70, 0x54, 0x59, 0x6a, 0x4a, 0x68, + 0x77, 0x2f, 0x73, 0x32, 0x76, 0x52, 0x30, 0x41, 0x65, 0x4a, 0x42, 0x6a, + 0x33, 0x32, 0x6c, 0x38, 0x77, 0x65, 0x44, 0x38, 0x30, 0x34, 0x2b, 0x54, + 0x2b, 0x53, 0x38, 0x79, 0x72, 0x0a, 0x4d, 0x4a, 0x42, 0x64, 0x67, 0x73, + 0x63, 0x35, 0x41, 0x62, 0x56, 0x32, 0x58, 0x59, 0x4b, 0x6e, 0x73, 0x4e, + 0x7a, 0x6c, 0x36, 0x35, 0x6b, 0x41, 0x45, 0x4f, 0x4c, 0x50, 0x43, 0x6f, + 0x62, 0x55, 0x4e, 0x79, 0x73, 0x4b, 0x71, 0x59, 0x6e, 0x4a, 0x38, 0x6e, + 0x61, 0x59, 0x43, 0x59, 0x4c, 0x33, 0x6a, 0x6a, 0x77, 0x47, 0x68, 0x57, + 0x33, 0x36, 0x69, 0x77, 0x49, 0x56, 0x41, 0x4e, 0x71, 0x68, 0x0a, 0x67, + 0x5a, 0x67, 0x76, 0x6e, 0x56, 0x70, 0x56, 0x58, 0x79, 0x59, 0x78, 0x31, + 0x47, 0x6c, 0x63, 0x65, 0x41, 0x35, 0x2f, 0x38, 0x6d, 0x49, 0x31, 0x41, + 0x6f, 0x47, 0x42, 0x41, 0x4f, 0x79, 0x6a, 0x75, 0x51, 0x68, 0x47, 0x6f, + 0x4f, 0x57, 0x34, 0x68, 0x49, 0x4c, 0x52, 0x30, 0x32, 0x57, 0x72, 0x71, + 0x77, 0x43, 0x45, 0x50, 0x71, 0x68, 0x52, 0x79, 0x4d, 0x70, 0x38, 0x37, + 0x64, 0x4d, 0x55, 0x0a, 0x39, 0x7a, 0x30, 0x50, 0x51, 0x4b, 0x4b, 0x72, + 0x41, 0x43, 0x6d, 0x69, 0x51, 0x70, 0x45, 0x55, 0x61, 0x77, 0x76, 0x74, + 0x52, 0x34, 0x61, 0x4d, 0x42, 0x37, 0x58, 0x7a, 0x79, 0x2b, 0x66, 0x36, + 0x4d, 0x52, 0x4c, 0x77, 0x64, 0x6f, 0x6e, 0x51, 0x49, 0x53, 0x7a, 0x73, + 0x77, 0x6f, 0x70, 0x61, 0x37, 0x47, 0x77, 0x6c, 0x33, 0x43, 0x59, 0x4c, + 0x69, 0x4f, 0x64, 0x4b, 0x46, 0x46, 0x49, 0x51, 0x0a, 0x7a, 0x44, 0x64, + 0x79, 0x6d, 0x77, 0x47, 0x6f, 0x76, 0x54, 0x6a, 0x4e, 0x32, 0x66, 0x51, + 0x6d, 0x38, 0x76, 0x2f, 0x55, 0x48, 0x64, 0x61, 0x66, 0x5a, 0x42, 0x50, + 0x49, 0x6d, 0x47, 0x42, 0x44, 0x7a, 0x66, 0x2b, 0x69, 0x79, 0x6b, 0x4e, + 0x42, 0x68, 0x5a, 0x63, 0x37, 0x55, 0x50, 0x31, 0x72, 0x63, 0x69, 0x4d, + 0x7a, 0x4b, 0x4f, 0x6e, 
0x54, 0x33, 0x42, 0x54, 0x46, 0x2f, 0x61, 0x6c, + 0x38, 0x0a, 0x68, 0x68, 0x42, 0x79, 0x30, 0x62, 0x6a, 0x73, 0x41, 0x6f, + 0x47, 0x41, 0x59, 0x72, 0x79, 0x6b, 0x72, 0x79, 0x50, 0x59, 0x57, 0x55, + 0x47, 0x41, 0x72, 0x4b, 0x53, 0x36, 0x4e, 0x4f, 0x39, 0x49, 0x6a, 0x74, + 0x77, 0x63, 0x37, 0x53, 0x62, 0x62, 0x4e, 0x58, 0x5a, 0x43, 0x75, 0x42, + 0x63, 0x6c, 0x36, 0x68, 0x63, 0x31, 0x48, 0x64, 0x72, 0x31, 0x55, 0x45, + 0x45, 0x4b, 0x51, 0x63, 0x73, 0x57, 0x0a, 0x35, 0x66, 0x61, 0x6a, 0x46, + 0x35, 0x55, 0x74, 0x39, 0x5a, 0x4c, 0x77, 0x63, 0x67, 0x67, 0x73, 0x57, + 0x53, 0x4b, 0x5a, 0x74, 0x5a, 0x6b, 0x39, 0x4e, 0x41, 0x37, 0x78, 0x79, + 0x62, 0x6d, 0x4f, 0x62, 0x75, 0x44, 0x45, 0x55, 0x37, 0x6f, 0x69, 0x46, + 0x46, 0x4e, 0x52, 0x41, 0x62, 0x42, 0x6b, 0x61, 0x6d, 0x2f, 0x76, 0x2f, + 0x33, 0x61, 0x33, 0x62, 0x77, 0x76, 0x53, 0x48, 0x47, 0x4b, 0x67, 0x0a, + 0x71, 0x31, 0x35, 0x63, 0x76, 0x43, 0x33, 0x31, 0x33, 0x7a, 0x67, 0x31, + 0x69, 0x69, 0x39, 0x4e, 0x58, 0x79, 0x6c, 0x76, 0x42, 0x49, 0x6f, 0x49, + 0x53, 0x38, 0x45, 0x57, 0x66, 0x65, 0x6b, 0x6c, 0x31, 0x4c, 0x4d, 0x76, + 0x48, 0x43, 0x37, 0x4e, 0x64, 0x4a, 0x34, 0x31, 0x77, 0x50, 0x59, 0x68, + 0x68, 0x6d, 0x50, 0x72, 0x55, 0x6b, 0x34, 0x43, 0x46, 0x48, 0x4a, 0x6a, + 0x61, 0x76, 0x4a, 0x48, 0x0a, 0x6b, 0x62, 0x36, 0x74, 0x44, 0x74, 0x65, + 0x6d, 0x79, 0x69, 0x41, 0x71, 0x33, 0x4e, 0x39, 0x61, 0x51, 0x6a, 0x2f, + 0x44, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x44, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + public: undefined, + publicOrig: undefined + }, + what: 'Unencrypted DSA private key (OpenSSH format)' + }, + { source: [ + '-----BEGIN DSA PRIVATE KEY-----', + 'Proc-Type: 4,ENCRYPTED', + 'DEK-Info: AES-128-CBC,D6218FEC397F57918B06DC3F9EC33487', + '', + 'OjtbW9WChM2baMDRXW/IYhmqAmlaulb8pQd0rDcFYGrAS/5PnA96R3X6f9tdooSv', + 'uPrQ37oCI7FfDrRmVy6pn6QOLxLZHk7LViBAA7q8VytRHUyezauicTkhc6/nfxQU', + 
'tYN5dNajSl/aFuv/IitaYtuG2ORvXLLJhR3sYe5fs2ai14qtULdWxtrZ4cACXPSz', + 'Ij29/lUfeBIYxNFFDF5hgsyO+jHLHO8bJCgXLCEb1o/Im68kQT3LjmvLbdhVM2+V', + '5c+YVDW/yXKpaSDNqctJFsETP3SsI5k9PVijSeND4U27W9HAoukJmG8jNBXmMOKW', + 'MtOEp0Yw5+fu0OPTACMcK7UoiN90RTFLNcDT9kKjeQwqk37sZBNyOpnQ57Mu6YWX', + 'BS5nnT4bK/WpyWUgGJ+MVzcp6pErBUy3SxlV2LN/LIZq+YzME55sygtwNSuVhpBH', + '+9UwrUIlaT8Ru//OdRKY5Vp8AqomjlAzCkBxnmC9XhVB0r4zqsqgVyzSfsX+oGLe', + 'dbixpbeMXe3WoAowx6LmqojuP46MIL80/9BH6Xr51UqJj156bYXashJtgpYaAQIn', + '8YB9sXf4S027sgHxK0Xanw==', + '-----END DSA PRIVATE KEY-----' + ].join('\n'), + expected: { + fulltype: undefined, + type: 'dss', + curve: undefined, + extra: [ 'D6218FEC397F57918B06DC3F9EC33487' ], + comment: undefined, + encryption: 'aes-128-cbc', + private: new Buffer([ + 0x3a, 0x3b, 0x5b, 0x5b, 0xd5, 0x82, 0x84, 0xcd, 0x9b, 0x68, 0xc0, 0xd1, + 0x5d, 0x6f, 0xc8, 0x62, 0x19, 0xaa, 0x02, 0x69, 0x5a, 0xba, 0x56, 0xfc, + 0xa5, 0x07, 0x74, 0xac, 0x37, 0x05, 0x60, 0x6a, 0xc0, 0x4b, 0xfe, 0x4f, + 0x9c, 0x0f, 0x7a, 0x47, 0x75, 0xfa, 0x7f, 0xdb, 0x5d, 0xa2, 0x84, 0xaf, + 0xb8, 0xfa, 0xd0, 0xdf, 0xba, 0x02, 0x23, 0xb1, 0x5f, 0x0e, 0xb4, 0x66, + 0x57, 0x2e, 0xa9, 0x9f, 0xa4, 0x0e, 0x2f, 0x12, 0xd9, 0x1e, 0x4e, 0xcb, + 0x56, 0x20, 0x40, 0x03, 0xba, 0xbc, 0x57, 0x2b, 0x51, 0x1d, 0x4c, 0x9e, + 0xcd, 0xab, 0xa2, 0x71, 0x39, 0x21, 0x73, 0xaf, 0xe7, 0x7f, 0x14, 0x14, + 0xb5, 0x83, 0x79, 0x74, 0xd6, 0xa3, 0x4a, 0x5f, 0xda, 0x16, 0xeb, 0xff, + 0x22, 0x2b, 0x5a, 0x62, 0xdb, 0x86, 0xd8, 0xe4, 0x6f, 0x5c, 0xb2, 0xc9, + 0x85, 0x1d, 0xec, 0x61, 0xee, 0x5f, 0xb3, 0x66, 0xa2, 0xd7, 0x8a, 0xad, + 0x50, 0xb7, 0x56, 0xc6, 0xda, 0xd9, 0xe1, 0xc0, 0x02, 0x5c, 0xf4, 0xb3, + 0x22, 0x3d, 0xbd, 0xfe, 0x55, 0x1f, 0x78, 0x12, 0x18, 0xc4, 0xd1, 0x45, + 0x0c, 0x5e, 0x61, 0x82, 0xcc, 0x8e, 0xfa, 0x31, 0xcb, 0x1c, 0xef, 0x1b, + 0x24, 0x28, 0x17, 0x2c, 0x21, 0x1b, 0xd6, 0x8f, 0xc8, 0x9b, 0xaf, 0x24, + 0x41, 0x3d, 0xcb, 0x8e, 0x6b, 0xcb, 0x6d, 0xd8, 0x55, 0x33, 0x6f, 0x95, + 0xe5, 0xcf, 0x98, 0x54, 0x35, 0xbf, 0xc9, 0x72, 
0xa9, 0x69, 0x20, 0xcd, + 0xa9, 0xcb, 0x49, 0x16, 0xc1, 0x13, 0x3f, 0x74, 0xac, 0x23, 0x99, 0x3d, + 0x3d, 0x58, 0xa3, 0x49, 0xe3, 0x43, 0xe1, 0x4d, 0xbb, 0x5b, 0xd1, 0xc0, + 0xa2, 0xe9, 0x09, 0x98, 0x6f, 0x23, 0x34, 0x15, 0xe6, 0x30, 0xe2, 0x96, + 0x32, 0xd3, 0x84, 0xa7, 0x46, 0x30, 0xe7, 0xe7, 0xee, 0xd0, 0xe3, 0xd3, + 0x00, 0x23, 0x1c, 0x2b, 0xb5, 0x28, 0x88, 0xdf, 0x74, 0x45, 0x31, 0x4b, + 0x35, 0xc0, 0xd3, 0xf6, 0x42, 0xa3, 0x79, 0x0c, 0x2a, 0x93, 0x7e, 0xec, + 0x64, 0x13, 0x72, 0x3a, 0x99, 0xd0, 0xe7, 0xb3, 0x2e, 0xe9, 0x85, 0x97, + 0x05, 0x2e, 0x67, 0x9d, 0x3e, 0x1b, 0x2b, 0xf5, 0xa9, 0xc9, 0x65, 0x20, + 0x18, 0x9f, 0x8c, 0x57, 0x37, 0x29, 0xea, 0x91, 0x2b, 0x05, 0x4c, 0xb7, + 0x4b, 0x19, 0x55, 0xd8, 0xb3, 0x7f, 0x2c, 0x86, 0x6a, 0xf9, 0x8c, 0xcc, + 0x13, 0x9e, 0x6c, 0xca, 0x0b, 0x70, 0x35, 0x2b, 0x95, 0x86, 0x90, 0x47, + 0xfb, 0xd5, 0x30, 0xad, 0x42, 0x25, 0x69, 0x3f, 0x11, 0xbb, 0xff, 0xce, + 0x75, 0x12, 0x98, 0xe5, 0x5a, 0x7c, 0x02, 0xaa, 0x26, 0x8e, 0x50, 0x33, + 0x0a, 0x40, 0x71, 0x9e, 0x60, 0xbd, 0x5e, 0x15, 0x41, 0xd2, 0xbe, 0x33, + 0xaa, 0xca, 0xa0, 0x57, 0x2c, 0xd2, 0x7e, 0xc5, 0xfe, 0xa0, 0x62, 0xde, + 0x75, 0xb8, 0xb1, 0xa5, 0xb7, 0x8c, 0x5d, 0xed, 0xd6, 0xa0, 0x0a, 0x30, + 0xc7, 0xa2, 0xe6, 0xaa, 0x88, 0xee, 0x3f, 0x8e, 0x8c, 0x20, 0xbf, 0x34, + 0xff, 0xd0, 0x47, 0xe9, 0x7a, 0xf9, 0xd5, 0x4a, 0x89, 0x8f, 0x5e, 0x7a, + 0x6d, 0x85, 0xda, 0xb2, 0x12, 0x6d, 0x82, 0x96, 0x1a, 0x01, 0x02, 0x27, + 0xf1, 0x80, 0x7d, 0xb1, 0x77, 0xf8, 0x4b, 0x4d, 0xbb, 0xb2, 0x01, 0xf1, + 0x2b, 0x45, 0xda, 0x9f, + ]), + privateOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x44, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x50, 0x72, 0x6f, 0x63, + 0x2d, 0x54, 0x79, 0x70, 0x65, 0x3a, 0x20, 0x34, 0x2c, 0x45, 0x4e, 0x43, + 0x52, 0x59, 0x50, 0x54, 0x45, 0x44, 0x0a, 0x44, 0x45, 0x4b, 0x2d, 0x49, + 0x6e, 0x66, 0x6f, 0x3a, 0x20, 0x41, 0x45, 0x53, 0x2d, 0x31, 
0x32, 0x38, + 0x2d, 0x43, 0x42, 0x43, 0x2c, 0x44, 0x36, 0x32, 0x31, 0x38, 0x46, 0x45, + 0x43, 0x33, 0x39, 0x37, 0x46, 0x35, 0x37, 0x39, 0x31, 0x38, 0x42, 0x30, + 0x36, 0x44, 0x43, 0x33, 0x46, 0x39, 0x45, 0x43, 0x33, 0x33, 0x34, 0x38, + 0x37, 0x0a, 0x0a, 0x4f, 0x6a, 0x74, 0x62, 0x57, 0x39, 0x57, 0x43, 0x68, + 0x4d, 0x32, 0x62, 0x61, 0x4d, 0x44, 0x52, 0x58, 0x57, 0x2f, 0x49, 0x59, + 0x68, 0x6d, 0x71, 0x41, 0x6d, 0x6c, 0x61, 0x75, 0x6c, 0x62, 0x38, 0x70, + 0x51, 0x64, 0x30, 0x72, 0x44, 0x63, 0x46, 0x59, 0x47, 0x72, 0x41, 0x53, + 0x2f, 0x35, 0x50, 0x6e, 0x41, 0x39, 0x36, 0x52, 0x33, 0x58, 0x36, 0x66, + 0x39, 0x74, 0x64, 0x6f, 0x6f, 0x53, 0x76, 0x0a, 0x75, 0x50, 0x72, 0x51, + 0x33, 0x37, 0x6f, 0x43, 0x49, 0x37, 0x46, 0x66, 0x44, 0x72, 0x52, 0x6d, + 0x56, 0x79, 0x36, 0x70, 0x6e, 0x36, 0x51, 0x4f, 0x4c, 0x78, 0x4c, 0x5a, + 0x48, 0x6b, 0x37, 0x4c, 0x56, 0x69, 0x42, 0x41, 0x41, 0x37, 0x71, 0x38, + 0x56, 0x79, 0x74, 0x52, 0x48, 0x55, 0x79, 0x65, 0x7a, 0x61, 0x75, 0x69, + 0x63, 0x54, 0x6b, 0x68, 0x63, 0x36, 0x2f, 0x6e, 0x66, 0x78, 0x51, 0x55, + 0x0a, 0x74, 0x59, 0x4e, 0x35, 0x64, 0x4e, 0x61, 0x6a, 0x53, 0x6c, 0x2f, + 0x61, 0x46, 0x75, 0x76, 0x2f, 0x49, 0x69, 0x74, 0x61, 0x59, 0x74, 0x75, + 0x47, 0x32, 0x4f, 0x52, 0x76, 0x58, 0x4c, 0x4c, 0x4a, 0x68, 0x52, 0x33, + 0x73, 0x59, 0x65, 0x35, 0x66, 0x73, 0x32, 0x61, 0x69, 0x31, 0x34, 0x71, + 0x74, 0x55, 0x4c, 0x64, 0x57, 0x78, 0x74, 0x72, 0x5a, 0x34, 0x63, 0x41, + 0x43, 0x58, 0x50, 0x53, 0x7a, 0x0a, 0x49, 0x6a, 0x32, 0x39, 0x2f, 0x6c, + 0x55, 0x66, 0x65, 0x42, 0x49, 0x59, 0x78, 0x4e, 0x46, 0x46, 0x44, 0x46, + 0x35, 0x68, 0x67, 0x73, 0x79, 0x4f, 0x2b, 0x6a, 0x48, 0x4c, 0x48, 0x4f, + 0x38, 0x62, 0x4a, 0x43, 0x67, 0x58, 0x4c, 0x43, 0x45, 0x62, 0x31, 0x6f, + 0x2f, 0x49, 0x6d, 0x36, 0x38, 0x6b, 0x51, 0x54, 0x33, 0x4c, 0x6a, 0x6d, + 0x76, 0x4c, 0x62, 0x64, 0x68, 0x56, 0x4d, 0x32, 0x2b, 0x56, 0x0a, 0x35, + 0x63, 0x2b, 0x59, 0x56, 0x44, 0x57, 0x2f, 0x79, 0x58, 0x4b, 0x70, 0x61, + 0x53, 0x44, 0x4e, 0x71, 0x63, 0x74, 0x4a, 0x46, 0x73, 0x45, 
0x54, 0x50, + 0x33, 0x53, 0x73, 0x49, 0x35, 0x6b, 0x39, 0x50, 0x56, 0x69, 0x6a, 0x53, + 0x65, 0x4e, 0x44, 0x34, 0x55, 0x32, 0x37, 0x57, 0x39, 0x48, 0x41, 0x6f, + 0x75, 0x6b, 0x4a, 0x6d, 0x47, 0x38, 0x6a, 0x4e, 0x42, 0x58, 0x6d, 0x4d, + 0x4f, 0x4b, 0x57, 0x0a, 0x4d, 0x74, 0x4f, 0x45, 0x70, 0x30, 0x59, 0x77, + 0x35, 0x2b, 0x66, 0x75, 0x30, 0x4f, 0x50, 0x54, 0x41, 0x43, 0x4d, 0x63, + 0x4b, 0x37, 0x55, 0x6f, 0x69, 0x4e, 0x39, 0x30, 0x52, 0x54, 0x46, 0x4c, + 0x4e, 0x63, 0x44, 0x54, 0x39, 0x6b, 0x4b, 0x6a, 0x65, 0x51, 0x77, 0x71, + 0x6b, 0x33, 0x37, 0x73, 0x5a, 0x42, 0x4e, 0x79, 0x4f, 0x70, 0x6e, 0x51, + 0x35, 0x37, 0x4d, 0x75, 0x36, 0x59, 0x57, 0x58, 0x0a, 0x42, 0x53, 0x35, + 0x6e, 0x6e, 0x54, 0x34, 0x62, 0x4b, 0x2f, 0x57, 0x70, 0x79, 0x57, 0x55, + 0x67, 0x47, 0x4a, 0x2b, 0x4d, 0x56, 0x7a, 0x63, 0x70, 0x36, 0x70, 0x45, + 0x72, 0x42, 0x55, 0x79, 0x33, 0x53, 0x78, 0x6c, 0x56, 0x32, 0x4c, 0x4e, + 0x2f, 0x4c, 0x49, 0x5a, 0x71, 0x2b, 0x59, 0x7a, 0x4d, 0x45, 0x35, 0x35, + 0x73, 0x79, 0x67, 0x74, 0x77, 0x4e, 0x53, 0x75, 0x56, 0x68, 0x70, 0x42, + 0x48, 0x0a, 0x2b, 0x39, 0x55, 0x77, 0x72, 0x55, 0x49, 0x6c, 0x61, 0x54, + 0x38, 0x52, 0x75, 0x2f, 0x2f, 0x4f, 0x64, 0x52, 0x4b, 0x59, 0x35, 0x56, + 0x70, 0x38, 0x41, 0x71, 0x6f, 0x6d, 0x6a, 0x6c, 0x41, 0x7a, 0x43, 0x6b, + 0x42, 0x78, 0x6e, 0x6d, 0x43, 0x39, 0x58, 0x68, 0x56, 0x42, 0x30, 0x72, + 0x34, 0x7a, 0x71, 0x73, 0x71, 0x67, 0x56, 0x79, 0x7a, 0x53, 0x66, 0x73, + 0x58, 0x2b, 0x6f, 0x47, 0x4c, 0x65, 0x0a, 0x64, 0x62, 0x69, 0x78, 0x70, + 0x62, 0x65, 0x4d, 0x58, 0x65, 0x33, 0x57, 0x6f, 0x41, 0x6f, 0x77, 0x78, + 0x36, 0x4c, 0x6d, 0x71, 0x6f, 0x6a, 0x75, 0x50, 0x34, 0x36, 0x4d, 0x49, + 0x4c, 0x38, 0x30, 0x2f, 0x39, 0x42, 0x48, 0x36, 0x58, 0x72, 0x35, 0x31, + 0x55, 0x71, 0x4a, 0x6a, 0x31, 0x35, 0x36, 0x62, 0x59, 0x58, 0x61, 0x73, + 0x68, 0x4a, 0x74, 0x67, 0x70, 0x59, 0x61, 0x41, 0x51, 0x49, 0x6e, 0x0a, + 0x38, 0x59, 0x42, 0x39, 0x73, 0x58, 0x66, 0x34, 0x53, 0x30, 0x32, 0x37, + 0x73, 0x67, 0x48, 0x78, 0x4b, 0x30, 0x58, 0x61, 0x6e, 0x77, 
0x3d, 0x3d, + 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x44, 0x53, + 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, 0x45, + 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + public: undefined, + publicOrig: undefined + }, + what: 'Encrypted DSA private key (OpenSSH format)' + }, + { source: 'ssh-dss AAAAB3NzaC1kc3MAAACBAL1FzdhtSYN22YgbInypxa//RyKiZAvWxNzDKNF0cYvabH6ESjdwdEcW96H7pHaKb69hoCHKZzfWmdpj93CcezGnorWvUaO4nAvbR+l/gH+AEYzHmJsECvM/nWnmPltwbvrPi/NmXCNf5aeqQlPbL44vZZjFRcCEwOomGY2FWUKJAAAAFQDBD5Y6VM8slL2I/HhqTmnoVFuGmQAAAIAUkwKajnUklN4CX/ZWEhIOZWCRa6ZKCosICBc/yC4RGJldqsXaM0PCmdy0dmcv0XuOZ4Nb92aiZnWdGRXTN5KJ405WVOQLBB3LpebA6qeyDGiVt/iPOxgJGZy0LIWiqWuzAAf199cQ2BkWmGTC6AAJi1depXC+KVAP+HwXOLjSUAAAAIBeoyKKEiVDQOpB0KAr5RLH3/+5BP+mR+52gTAbxZbxQPYyH7mqq2/PI1A0bjnvnLe45a/iVumoIjP/atMtVJ6w103Ex5JGMwVB7uZBWeyJ4m3MqQdPf1/+r+UCoOaL7stK3rWeTCydb82z34ejX0dZobAPGj2F7SyZy7jM8THgcw==', + expected: { + fulltype: 'ssh-dss', + type: 'dss', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x00, + 0x00, 0x00, 0x81, 0x00, 0xbd, 0x45, 0xcd, 0xd8, 0x6d, 0x49, 0x83, 0x76, + 0xd9, 0x88, 0x1b, 0x22, 0x7c, 0xa9, 0xc5, 0xaf, 0xff, 0x47, 0x22, 0xa2, + 0x64, 0x0b, 0xd6, 0xc4, 0xdc, 0xc3, 0x28, 0xd1, 0x74, 0x71, 0x8b, 0xda, + 0x6c, 0x7e, 0x84, 0x4a, 0x37, 0x70, 0x74, 0x47, 0x16, 0xf7, 0xa1, 0xfb, + 0xa4, 0x76, 0x8a, 0x6f, 0xaf, 0x61, 0xa0, 0x21, 0xca, 0x67, 0x37, 0xd6, + 0x99, 0xda, 0x63, 0xf7, 0x70, 0x9c, 0x7b, 0x31, 0xa7, 0xa2, 0xb5, 0xaf, + 0x51, 0xa3, 0xb8, 0x9c, 0x0b, 0xdb, 0x47, 0xe9, 0x7f, 0x80, 0x7f, 0x80, + 0x11, 0x8c, 0xc7, 0x98, 0x9b, 0x04, 0x0a, 0xf3, 0x3f, 0x9d, 0x69, 0xe6, + 0x3e, 0x5b, 0x70, 0x6e, 0xfa, 0xcf, 0x8b, 0xf3, 0x66, 0x5c, 0x23, 0x5f, + 0xe5, 0xa7, 0xaa, 0x42, 0x53, 0xdb, 0x2f, 0x8e, 0x2f, 0x65, 0x98, 0xc5, + 0x45, 0xc0, 0x84, 0xc0, 0xea, 0x26, 0x19, 0x8d, 0x85, 
0x59, 0x42, 0x89, + 0x00, 0x00, 0x00, 0x15, 0x00, 0xc1, 0x0f, 0x96, 0x3a, 0x54, 0xcf, 0x2c, + 0x94, 0xbd, 0x88, 0xfc, 0x78, 0x6a, 0x4e, 0x69, 0xe8, 0x54, 0x5b, 0x86, + 0x99, 0x00, 0x00, 0x00, 0x80, 0x14, 0x93, 0x02, 0x9a, 0x8e, 0x75, 0x24, + 0x94, 0xde, 0x02, 0x5f, 0xf6, 0x56, 0x12, 0x12, 0x0e, 0x65, 0x60, 0x91, + 0x6b, 0xa6, 0x4a, 0x0a, 0x8b, 0x08, 0x08, 0x17, 0x3f, 0xc8, 0x2e, 0x11, + 0x18, 0x99, 0x5d, 0xaa, 0xc5, 0xda, 0x33, 0x43, 0xc2, 0x99, 0xdc, 0xb4, + 0x76, 0x67, 0x2f, 0xd1, 0x7b, 0x8e, 0x67, 0x83, 0x5b, 0xf7, 0x66, 0xa2, + 0x66, 0x75, 0x9d, 0x19, 0x15, 0xd3, 0x37, 0x92, 0x89, 0xe3, 0x4e, 0x56, + 0x54, 0xe4, 0x0b, 0x04, 0x1d, 0xcb, 0xa5, 0xe6, 0xc0, 0xea, 0xa7, 0xb2, + 0x0c, 0x68, 0x95, 0xb7, 0xf8, 0x8f, 0x3b, 0x18, 0x09, 0x19, 0x9c, 0xb4, + 0x2c, 0x85, 0xa2, 0xa9, 0x6b, 0xb3, 0x00, 0x07, 0xf5, 0xf7, 0xd7, 0x10, + 0xd8, 0x19, 0x16, 0x98, 0x64, 0xc2, 0xe8, 0x00, 0x09, 0x8b, 0x57, 0x5e, + 0xa5, 0x70, 0xbe, 0x29, 0x50, 0x0f, 0xf8, 0x7c, 0x17, 0x38, 0xb8, 0xd2, + 0x50, 0x00, 0x00, 0x00, 0x80, 0x5e, 0xa3, 0x22, 0x8a, 0x12, 0x25, 0x43, + 0x40, 0xea, 0x41, 0xd0, 0xa0, 0x2b, 0xe5, 0x12, 0xc7, 0xdf, 0xff, 0xb9, + 0x04, 0xff, 0xa6, 0x47, 0xee, 0x76, 0x81, 0x30, 0x1b, 0xc5, 0x96, 0xf1, + 0x40, 0xf6, 0x32, 0x1f, 0xb9, 0xaa, 0xab, 0x6f, 0xcf, 0x23, 0x50, 0x34, + 0x6e, 0x39, 0xef, 0x9c, 0xb7, 0xb8, 0xe5, 0xaf, 0xe2, 0x56, 0xe9, 0xa8, + 0x22, 0x33, 0xff, 0x6a, 0xd3, 0x2d, 0x54, 0x9e, 0xb0, 0xd7, 0x4d, 0xc4, + 0xc7, 0x92, 0x46, 0x33, 0x05, 0x41, 0xee, 0xe6, 0x41, 0x59, 0xec, 0x89, + 0xe2, 0x6d, 0xcc, 0xa9, 0x07, 0x4f, 0x7f, 0x5f, 0xfe, 0xaf, 0xe5, 0x02, + 0xa0, 0xe6, 0x8b, 0xee, 0xcb, 0x4a, 0xde, 0xb5, 0x9e, 0x4c, 0x2c, 0x9d, + 0x6f, 0xcd, 0xb3, 0xdf, 0x87, 0xa3, 0x5f, 0x47, 0x59, 0xa1, 0xb0, 0x0f, + 0x1a, 0x3d, 0x85, 0xed, 0x2c, 0x99, 0xcb, 0xb8, 0xcc, 0xf1, 0x31, 0xe0, + 0x73, + ]), + publicOrig: new Buffer([ + 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x20, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x6b, 0x63, 0x33, 0x4d, 0x41, + 0x41, 0x41, 
0x43, 0x42, 0x41, 0x4c, 0x31, 0x46, 0x7a, 0x64, 0x68, 0x74, + 0x53, 0x59, 0x4e, 0x32, 0x32, 0x59, 0x67, 0x62, 0x49, 0x6e, 0x79, 0x70, + 0x78, 0x61, 0x2f, 0x2f, 0x52, 0x79, 0x4b, 0x69, 0x5a, 0x41, 0x76, 0x57, + 0x78, 0x4e, 0x7a, 0x44, 0x4b, 0x4e, 0x46, 0x30, 0x63, 0x59, 0x76, 0x61, + 0x62, 0x48, 0x36, 0x45, 0x53, 0x6a, 0x64, 0x77, 0x64, 0x45, 0x63, 0x57, + 0x39, 0x36, 0x48, 0x37, 0x70, 0x48, 0x61, 0x4b, 0x62, 0x36, 0x39, 0x68, + 0x6f, 0x43, 0x48, 0x4b, 0x5a, 0x7a, 0x66, 0x57, 0x6d, 0x64, 0x70, 0x6a, + 0x39, 0x33, 0x43, 0x63, 0x65, 0x7a, 0x47, 0x6e, 0x6f, 0x72, 0x57, 0x76, + 0x55, 0x61, 0x4f, 0x34, 0x6e, 0x41, 0x76, 0x62, 0x52, 0x2b, 0x6c, 0x2f, + 0x67, 0x48, 0x2b, 0x41, 0x45, 0x59, 0x7a, 0x48, 0x6d, 0x4a, 0x73, 0x45, + 0x43, 0x76, 0x4d, 0x2f, 0x6e, 0x57, 0x6e, 0x6d, 0x50, 0x6c, 0x74, 0x77, + 0x62, 0x76, 0x72, 0x50, 0x69, 0x2f, 0x4e, 0x6d, 0x58, 0x43, 0x4e, 0x66, + 0x35, 0x61, 0x65, 0x71, 0x51, 0x6c, 0x50, 0x62, 0x4c, 0x34, 0x34, 0x76, + 0x5a, 0x5a, 0x6a, 0x46, 0x52, 0x63, 0x43, 0x45, 0x77, 0x4f, 0x6f, 0x6d, + 0x47, 0x59, 0x32, 0x46, 0x57, 0x55, 0x4b, 0x4a, 0x41, 0x41, 0x41, 0x41, + 0x46, 0x51, 0x44, 0x42, 0x44, 0x35, 0x59, 0x36, 0x56, 0x4d, 0x38, 0x73, + 0x6c, 0x4c, 0x32, 0x49, 0x2f, 0x48, 0x68, 0x71, 0x54, 0x6d, 0x6e, 0x6f, + 0x56, 0x46, 0x75, 0x47, 0x6d, 0x51, 0x41, 0x41, 0x41, 0x49, 0x41, 0x55, + 0x6b, 0x77, 0x4b, 0x61, 0x6a, 0x6e, 0x55, 0x6b, 0x6c, 0x4e, 0x34, 0x43, + 0x58, 0x2f, 0x5a, 0x57, 0x45, 0x68, 0x49, 0x4f, 0x5a, 0x57, 0x43, 0x52, + 0x61, 0x36, 0x5a, 0x4b, 0x43, 0x6f, 0x73, 0x49, 0x43, 0x42, 0x63, 0x2f, + 0x79, 0x43, 0x34, 0x52, 0x47, 0x4a, 0x6c, 0x64, 0x71, 0x73, 0x58, 0x61, + 0x4d, 0x30, 0x50, 0x43, 0x6d, 0x64, 0x79, 0x30, 0x64, 0x6d, 0x63, 0x76, + 0x30, 0x58, 0x75, 0x4f, 0x5a, 0x34, 0x4e, 0x62, 0x39, 0x32, 0x61, 0x69, + 0x5a, 0x6e, 0x57, 0x64, 0x47, 0x52, 0x58, 0x54, 0x4e, 0x35, 0x4b, 0x4a, + 0x34, 0x30, 0x35, 0x57, 0x56, 0x4f, 0x51, 0x4c, 0x42, 0x42, 0x33, 0x4c, + 0x70, 0x65, 0x62, 0x41, 0x36, 0x71, 0x65, 0x79, 0x44, 0x47, 0x69, 0x56, + 0x74, 0x2f, 
0x69, 0x50, 0x4f, 0x78, 0x67, 0x4a, 0x47, 0x5a, 0x79, 0x30, + 0x4c, 0x49, 0x57, 0x69, 0x71, 0x57, 0x75, 0x7a, 0x41, 0x41, 0x66, 0x31, + 0x39, 0x39, 0x63, 0x51, 0x32, 0x42, 0x6b, 0x57, 0x6d, 0x47, 0x54, 0x43, + 0x36, 0x41, 0x41, 0x4a, 0x69, 0x31, 0x64, 0x65, 0x70, 0x58, 0x43, 0x2b, + 0x4b, 0x56, 0x41, 0x50, 0x2b, 0x48, 0x77, 0x58, 0x4f, 0x4c, 0x6a, 0x53, + 0x55, 0x41, 0x41, 0x41, 0x41, 0x49, 0x42, 0x65, 0x6f, 0x79, 0x4b, 0x4b, + 0x45, 0x69, 0x56, 0x44, 0x51, 0x4f, 0x70, 0x42, 0x30, 0x4b, 0x41, 0x72, + 0x35, 0x52, 0x4c, 0x48, 0x33, 0x2f, 0x2b, 0x35, 0x42, 0x50, 0x2b, 0x6d, + 0x52, 0x2b, 0x35, 0x32, 0x67, 0x54, 0x41, 0x62, 0x78, 0x5a, 0x62, 0x78, + 0x51, 0x50, 0x59, 0x79, 0x48, 0x37, 0x6d, 0x71, 0x71, 0x32, 0x2f, 0x50, + 0x49, 0x31, 0x41, 0x30, 0x62, 0x6a, 0x6e, 0x76, 0x6e, 0x4c, 0x65, 0x34, + 0x35, 0x61, 0x2f, 0x69, 0x56, 0x75, 0x6d, 0x6f, 0x49, 0x6a, 0x50, 0x2f, + 0x61, 0x74, 0x4d, 0x74, 0x56, 0x4a, 0x36, 0x77, 0x31, 0x30, 0x33, 0x45, + 0x78, 0x35, 0x4a, 0x47, 0x4d, 0x77, 0x56, 0x42, 0x37, 0x75, 0x5a, 0x42, + 0x57, 0x65, 0x79, 0x4a, 0x34, 0x6d, 0x33, 0x4d, 0x71, 0x51, 0x64, 0x50, + 0x66, 0x31, 0x2f, 0x2b, 0x72, 0x2b, 0x55, 0x43, 0x6f, 0x4f, 0x61, 0x4c, + 0x37, 0x73, 0x74, 0x4b, 0x33, 0x72, 0x57, 0x65, 0x54, 0x43, 0x79, 0x64, + 0x62, 0x38, 0x32, 0x7a, 0x33, 0x34, 0x65, 0x6a, 0x58, 0x30, 0x64, 0x5a, + 0x6f, 0x62, 0x41, 0x50, 0x47, 0x6a, 0x32, 0x46, 0x37, 0x53, 0x79, 0x5a, + 0x79, 0x37, 0x6a, 0x4d, 0x38, 0x54, 0x48, 0x67, 0x63, 0x77, 0x3d, 0x3d, + ]) + }, + what: 'DSA public key (OpenSSH format)' + }, + { source: 'ssh-dss 
AAAAB3NzaC1kc3MAAACBAILCaN5QbaErJBa0nNoCY2QvvgcSd7WMgIgQ2qPAPH3Sav+9SWZB0SlHP8QYmV/ntBufaRw9ZdIop6esZBoXmNXDxjcfwpJAzrT3qOMfHXeNcc7w/6dHp+2DlkWD3yzK07SheIZnGNtF1fpfrUvkHFA8UX96awWRLnYuSvWvAK9dAAAAFQCrfeadO0tCCwgnq97Skk0Ng/xV6wAAAIBcVYaMkjXNgfvcUS4pwMabY8rFNGbYo9Jgv7IU2LGFEzD6yFbp2t1sQw031EYlad4n3BMTrZdE6K2/SMi3f3yRr0Z6S6nVSbo7/kIT8/tGJAj9/8RHdCihrFUMaAJNzdk5dmbyaCzexLmLXlKfUVW9A1WOsASjeJBwpFrJ8deuVAAAAIAu/zdW1qxdUVd0b7sjWFFZpp3l6HgOXlZpaOYchIoybs7n2dkYwmvn9glNcLCvRqiZpm2oEZIg1dy8RaH5b7AqY2huwmv+hLZ1XkUFyS2Ae2AKux5rslmiKmvglbDY0Rng1Wj3r3/N0KcKke1g1ohEUaQfXnz7VVx2YoVxhT1dTQ== testing ssh2 from node.js', + expected: { + fulltype: 'ssh-dss', + type: 'dss', + curve: undefined, + extra: undefined, + comment: 'testing ssh2 from node.js', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x00, + 0x00, 0x00, 0x81, 0x00, 0x82, 0xc2, 0x68, 0xde, 0x50, 0x6d, 0xa1, 0x2b, + 0x24, 0x16, 0xb4, 0x9c, 0xda, 0x02, 0x63, 0x64, 0x2f, 0xbe, 0x07, 0x12, + 0x77, 0xb5, 0x8c, 0x80, 0x88, 0x10, 0xda, 0xa3, 0xc0, 0x3c, 0x7d, 0xd2, + 0x6a, 0xff, 0xbd, 0x49, 0x66, 0x41, 0xd1, 0x29, 0x47, 0x3f, 0xc4, 0x18, + 0x99, 0x5f, 0xe7, 0xb4, 0x1b, 0x9f, 0x69, 0x1c, 0x3d, 0x65, 0xd2, 0x28, + 0xa7, 0xa7, 0xac, 0x64, 0x1a, 0x17, 0x98, 0xd5, 0xc3, 0xc6, 0x37, 0x1f, + 0xc2, 0x92, 0x40, 0xce, 0xb4, 0xf7, 0xa8, 0xe3, 0x1f, 0x1d, 0x77, 0x8d, + 0x71, 0xce, 0xf0, 0xff, 0xa7, 0x47, 0xa7, 0xed, 0x83, 0x96, 0x45, 0x83, + 0xdf, 0x2c, 0xca, 0xd3, 0xb4, 0xa1, 0x78, 0x86, 0x67, 0x18, 0xdb, 0x45, + 0xd5, 0xfa, 0x5f, 0xad, 0x4b, 0xe4, 0x1c, 0x50, 0x3c, 0x51, 0x7f, 0x7a, + 0x6b, 0x05, 0x91, 0x2e, 0x76, 0x2e, 0x4a, 0xf5, 0xaf, 0x00, 0xaf, 0x5d, + 0x00, 0x00, 0x00, 0x15, 0x00, 0xab, 0x7d, 0xe6, 0x9d, 0x3b, 0x4b, 0x42, + 0x0b, 0x08, 0x27, 0xab, 0xde, 0xd2, 0x92, 0x4d, 0x0d, 0x83, 0xfc, 0x55, + 0xeb, 0x00, 0x00, 0x00, 0x80, 0x5c, 0x55, 0x86, 0x8c, 0x92, 0x35, 0xcd, + 0x81, 0xfb, 0xdc, 0x51, 0x2e, 0x29, 0xc0, 0xc6, 
0x9b, 0x63, 0xca, 0xc5, + 0x34, 0x66, 0xd8, 0xa3, 0xd2, 0x60, 0xbf, 0xb2, 0x14, 0xd8, 0xb1, 0x85, + 0x13, 0x30, 0xfa, 0xc8, 0x56, 0xe9, 0xda, 0xdd, 0x6c, 0x43, 0x0d, 0x37, + 0xd4, 0x46, 0x25, 0x69, 0xde, 0x27, 0xdc, 0x13, 0x13, 0xad, 0x97, 0x44, + 0xe8, 0xad, 0xbf, 0x48, 0xc8, 0xb7, 0x7f, 0x7c, 0x91, 0xaf, 0x46, 0x7a, + 0x4b, 0xa9, 0xd5, 0x49, 0xba, 0x3b, 0xfe, 0x42, 0x13, 0xf3, 0xfb, 0x46, + 0x24, 0x08, 0xfd, 0xff, 0xc4, 0x47, 0x74, 0x28, 0xa1, 0xac, 0x55, 0x0c, + 0x68, 0x02, 0x4d, 0xcd, 0xd9, 0x39, 0x76, 0x66, 0xf2, 0x68, 0x2c, 0xde, + 0xc4, 0xb9, 0x8b, 0x5e, 0x52, 0x9f, 0x51, 0x55, 0xbd, 0x03, 0x55, 0x8e, + 0xb0, 0x04, 0xa3, 0x78, 0x90, 0x70, 0xa4, 0x5a, 0xc9, 0xf1, 0xd7, 0xae, + 0x54, 0x00, 0x00, 0x00, 0x80, 0x2e, 0xff, 0x37, 0x56, 0xd6, 0xac, 0x5d, + 0x51, 0x57, 0x74, 0x6f, 0xbb, 0x23, 0x58, 0x51, 0x59, 0xa6, 0x9d, 0xe5, + 0xe8, 0x78, 0x0e, 0x5e, 0x56, 0x69, 0x68, 0xe6, 0x1c, 0x84, 0x8a, 0x32, + 0x6e, 0xce, 0xe7, 0xd9, 0xd9, 0x18, 0xc2, 0x6b, 0xe7, 0xf6, 0x09, 0x4d, + 0x70, 0xb0, 0xaf, 0x46, 0xa8, 0x99, 0xa6, 0x6d, 0xa8, 0x11, 0x92, 0x20, + 0xd5, 0xdc, 0xbc, 0x45, 0xa1, 0xf9, 0x6f, 0xb0, 0x2a, 0x63, 0x68, 0x6e, + 0xc2, 0x6b, 0xfe, 0x84, 0xb6, 0x75, 0x5e, 0x45, 0x05, 0xc9, 0x2d, 0x80, + 0x7b, 0x60, 0x0a, 0xbb, 0x1e, 0x6b, 0xb2, 0x59, 0xa2, 0x2a, 0x6b, 0xe0, + 0x95, 0xb0, 0xd8, 0xd1, 0x19, 0xe0, 0xd5, 0x68, 0xf7, 0xaf, 0x7f, 0xcd, + 0xd0, 0xa7, 0x0a, 0x91, 0xed, 0x60, 0xd6, 0x88, 0x44, 0x51, 0xa4, 0x1f, + 0x5e, 0x7c, 0xfb, 0x55, 0x5c, 0x76, 0x62, 0x85, 0x71, 0x85, 0x3d, 0x5d, + 0x4d, + ]), + publicOrig: new Buffer([ + 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x20, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x6b, 0x63, 0x33, 0x4d, 0x41, + 0x41, 0x41, 0x43, 0x42, 0x41, 0x49, 0x4c, 0x43, 0x61, 0x4e, 0x35, 0x51, + 0x62, 0x61, 0x45, 0x72, 0x4a, 0x42, 0x61, 0x30, 0x6e, 0x4e, 0x6f, 0x43, + 0x59, 0x32, 0x51, 0x76, 0x76, 0x67, 0x63, 0x53, 0x64, 0x37, 0x57, 0x4d, + 0x67, 0x49, 0x67, 0x51, 0x32, 0x71, 0x50, 0x41, 0x50, 0x48, 0x33, 0x53, + 0x61, 
0x76, 0x2b, 0x39, 0x53, 0x57, 0x5a, 0x42, 0x30, 0x53, 0x6c, 0x48, + 0x50, 0x38, 0x51, 0x59, 0x6d, 0x56, 0x2f, 0x6e, 0x74, 0x42, 0x75, 0x66, + 0x61, 0x52, 0x77, 0x39, 0x5a, 0x64, 0x49, 0x6f, 0x70, 0x36, 0x65, 0x73, + 0x5a, 0x42, 0x6f, 0x58, 0x6d, 0x4e, 0x58, 0x44, 0x78, 0x6a, 0x63, 0x66, + 0x77, 0x70, 0x4a, 0x41, 0x7a, 0x72, 0x54, 0x33, 0x71, 0x4f, 0x4d, 0x66, + 0x48, 0x58, 0x65, 0x4e, 0x63, 0x63, 0x37, 0x77, 0x2f, 0x36, 0x64, 0x48, + 0x70, 0x2b, 0x32, 0x44, 0x6c, 0x6b, 0x57, 0x44, 0x33, 0x79, 0x7a, 0x4b, + 0x30, 0x37, 0x53, 0x68, 0x65, 0x49, 0x5a, 0x6e, 0x47, 0x4e, 0x74, 0x46, + 0x31, 0x66, 0x70, 0x66, 0x72, 0x55, 0x76, 0x6b, 0x48, 0x46, 0x41, 0x38, + 0x55, 0x58, 0x39, 0x36, 0x61, 0x77, 0x57, 0x52, 0x4c, 0x6e, 0x59, 0x75, + 0x53, 0x76, 0x57, 0x76, 0x41, 0x4b, 0x39, 0x64, 0x41, 0x41, 0x41, 0x41, + 0x46, 0x51, 0x43, 0x72, 0x66, 0x65, 0x61, 0x64, 0x4f, 0x30, 0x74, 0x43, + 0x43, 0x77, 0x67, 0x6e, 0x71, 0x39, 0x37, 0x53, 0x6b, 0x6b, 0x30, 0x4e, + 0x67, 0x2f, 0x78, 0x56, 0x36, 0x77, 0x41, 0x41, 0x41, 0x49, 0x42, 0x63, + 0x56, 0x59, 0x61, 0x4d, 0x6b, 0x6a, 0x58, 0x4e, 0x67, 0x66, 0x76, 0x63, + 0x55, 0x53, 0x34, 0x70, 0x77, 0x4d, 0x61, 0x62, 0x59, 0x38, 0x72, 0x46, + 0x4e, 0x47, 0x62, 0x59, 0x6f, 0x39, 0x4a, 0x67, 0x76, 0x37, 0x49, 0x55, + 0x32, 0x4c, 0x47, 0x46, 0x45, 0x7a, 0x44, 0x36, 0x79, 0x46, 0x62, 0x70, + 0x32, 0x74, 0x31, 0x73, 0x51, 0x77, 0x30, 0x33, 0x31, 0x45, 0x59, 0x6c, + 0x61, 0x64, 0x34, 0x6e, 0x33, 0x42, 0x4d, 0x54, 0x72, 0x5a, 0x64, 0x45, + 0x36, 0x4b, 0x32, 0x2f, 0x53, 0x4d, 0x69, 0x33, 0x66, 0x33, 0x79, 0x52, + 0x72, 0x30, 0x5a, 0x36, 0x53, 0x36, 0x6e, 0x56, 0x53, 0x62, 0x6f, 0x37, + 0x2f, 0x6b, 0x49, 0x54, 0x38, 0x2f, 0x74, 0x47, 0x4a, 0x41, 0x6a, 0x39, + 0x2f, 0x38, 0x52, 0x48, 0x64, 0x43, 0x69, 0x68, 0x72, 0x46, 0x55, 0x4d, + 0x61, 0x41, 0x4a, 0x4e, 0x7a, 0x64, 0x6b, 0x35, 0x64, 0x6d, 0x62, 0x79, + 0x61, 0x43, 0x7a, 0x65, 0x78, 0x4c, 0x6d, 0x4c, 0x58, 0x6c, 0x4b, 0x66, + 0x55, 0x56, 0x57, 0x39, 0x41, 0x31, 0x57, 0x4f, 0x73, 0x41, 0x53, 0x6a, + 0x65, 
0x4a, 0x42, 0x77, 0x70, 0x46, 0x72, 0x4a, 0x38, 0x64, 0x65, 0x75, + 0x56, 0x41, 0x41, 0x41, 0x41, 0x49, 0x41, 0x75, 0x2f, 0x7a, 0x64, 0x57, + 0x31, 0x71, 0x78, 0x64, 0x55, 0x56, 0x64, 0x30, 0x62, 0x37, 0x73, 0x6a, + 0x57, 0x46, 0x46, 0x5a, 0x70, 0x70, 0x33, 0x6c, 0x36, 0x48, 0x67, 0x4f, + 0x58, 0x6c, 0x5a, 0x70, 0x61, 0x4f, 0x59, 0x63, 0x68, 0x49, 0x6f, 0x79, + 0x62, 0x73, 0x37, 0x6e, 0x32, 0x64, 0x6b, 0x59, 0x77, 0x6d, 0x76, 0x6e, + 0x39, 0x67, 0x6c, 0x4e, 0x63, 0x4c, 0x43, 0x76, 0x52, 0x71, 0x69, 0x5a, + 0x70, 0x6d, 0x32, 0x6f, 0x45, 0x5a, 0x49, 0x67, 0x31, 0x64, 0x79, 0x38, + 0x52, 0x61, 0x48, 0x35, 0x62, 0x37, 0x41, 0x71, 0x59, 0x32, 0x68, 0x75, + 0x77, 0x6d, 0x76, 0x2b, 0x68, 0x4c, 0x5a, 0x31, 0x58, 0x6b, 0x55, 0x46, + 0x79, 0x53, 0x32, 0x41, 0x65, 0x32, 0x41, 0x4b, 0x75, 0x78, 0x35, 0x72, + 0x73, 0x6c, 0x6d, 0x69, 0x4b, 0x6d, 0x76, 0x67, 0x6c, 0x62, 0x44, 0x59, + 0x30, 0x52, 0x6e, 0x67, 0x31, 0x57, 0x6a, 0x33, 0x72, 0x33, 0x2f, 0x4e, + 0x30, 0x4b, 0x63, 0x4b, 0x6b, 0x65, 0x31, 0x67, 0x31, 0x6f, 0x68, 0x45, + 0x55, 0x61, 0x51, 0x66, 0x58, 0x6e, 0x7a, 0x37, 0x56, 0x56, 0x78, 0x32, + 0x59, 0x6f, 0x56, 0x78, 0x68, 0x54, 0x31, 0x64, 0x54, 0x51, 0x3d, 0x3d, + 0x20, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x20, 0x73, 0x73, 0x68, + 0x32, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x2e, + 0x6a, 0x73, + ]) + }, + what: 'DSA public key (OpenSSH format) with comment' + }, + { source: 'ssh-dss 
AAAAB3NzaC1kc3MAAACBAILCaN5QbaErJBa0nNoCY2QvvgcSd7WMgIgQ2qPAPH3Sav+9SWZB0SlHP8QYmV/ntBufaRw9ZdIop6esZBoXmNXDxjcfwpJAzrT3qOMfHXeNcc7w/6dHp+2DlkWD3yzK07SheIZnGNtF1fpfrUvkHFA8UX96awWRLnYuSvWvAK9dAAAAFQCrfeadO0tCCwgnq97Skk0Ng/xV6wAAAIBcVYaMkjXNgfvcUS4pwMabY8rFNGbYo9Jgv7IU2LGFEzD6yFbp2t1sQw031EYlad4n3BMTrZdE6K2/SMi3f3yRr0Z6S6nVSbo7/kIT8/tGJAj9/8RHdCihrFUMaAJNzdk5dmbyaCzexLmLXlKfUVW9A1WOsASjeJBwpFrJ8deuVAAAAIAu/zdW1qxdUVd0b7sjWFFZpp3l6HgOXlZpaOYchIoybs7n2dkYwmvn9glNcLCvRqiZpm2oEZIg1dy8RaH5b7AqY2huwmv+hLZ1XkUFyS2Ae2AKux5rslmiKmvglbDY0Rng1Wj3r3/N0KcKke1g1ohEUaQfXnz7VVx2YoVxhT1dTQ== ssh2test', + expected: { + fulltype: 'ssh-dss', + type: 'dss', + curve: undefined, + extra: undefined, + comment: 'ssh2test', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x00, + 0x00, 0x00, 0x81, 0x00, 0x82, 0xc2, 0x68, 0xde, 0x50, 0x6d, 0xa1, 0x2b, + 0x24, 0x16, 0xb4, 0x9c, 0xda, 0x02, 0x63, 0x64, 0x2f, 0xbe, 0x07, 0x12, + 0x77, 0xb5, 0x8c, 0x80, 0x88, 0x10, 0xda, 0xa3, 0xc0, 0x3c, 0x7d, 0xd2, + 0x6a, 0xff, 0xbd, 0x49, 0x66, 0x41, 0xd1, 0x29, 0x47, 0x3f, 0xc4, 0x18, + 0x99, 0x5f, 0xe7, 0xb4, 0x1b, 0x9f, 0x69, 0x1c, 0x3d, 0x65, 0xd2, 0x28, + 0xa7, 0xa7, 0xac, 0x64, 0x1a, 0x17, 0x98, 0xd5, 0xc3, 0xc6, 0x37, 0x1f, + 0xc2, 0x92, 0x40, 0xce, 0xb4, 0xf7, 0xa8, 0xe3, 0x1f, 0x1d, 0x77, 0x8d, + 0x71, 0xce, 0xf0, 0xff, 0xa7, 0x47, 0xa7, 0xed, 0x83, 0x96, 0x45, 0x83, + 0xdf, 0x2c, 0xca, 0xd3, 0xb4, 0xa1, 0x78, 0x86, 0x67, 0x18, 0xdb, 0x45, + 0xd5, 0xfa, 0x5f, 0xad, 0x4b, 0xe4, 0x1c, 0x50, 0x3c, 0x51, 0x7f, 0x7a, + 0x6b, 0x05, 0x91, 0x2e, 0x76, 0x2e, 0x4a, 0xf5, 0xaf, 0x00, 0xaf, 0x5d, + 0x00, 0x00, 0x00, 0x15, 0x00, 0xab, 0x7d, 0xe6, 0x9d, 0x3b, 0x4b, 0x42, + 0x0b, 0x08, 0x27, 0xab, 0xde, 0xd2, 0x92, 0x4d, 0x0d, 0x83, 0xfc, 0x55, + 0xeb, 0x00, 0x00, 0x00, 0x80, 0x5c, 0x55, 0x86, 0x8c, 0x92, 0x35, 0xcd, + 0x81, 0xfb, 0xdc, 0x51, 0x2e, 0x29, 0xc0, 0xc6, 0x9b, 0x63, 0xca, 0xc5, + 0x34, 
0x66, 0xd8, 0xa3, 0xd2, 0x60, 0xbf, 0xb2, 0x14, 0xd8, 0xb1, 0x85, + 0x13, 0x30, 0xfa, 0xc8, 0x56, 0xe9, 0xda, 0xdd, 0x6c, 0x43, 0x0d, 0x37, + 0xd4, 0x46, 0x25, 0x69, 0xde, 0x27, 0xdc, 0x13, 0x13, 0xad, 0x97, 0x44, + 0xe8, 0xad, 0xbf, 0x48, 0xc8, 0xb7, 0x7f, 0x7c, 0x91, 0xaf, 0x46, 0x7a, + 0x4b, 0xa9, 0xd5, 0x49, 0xba, 0x3b, 0xfe, 0x42, 0x13, 0xf3, 0xfb, 0x46, + 0x24, 0x08, 0xfd, 0xff, 0xc4, 0x47, 0x74, 0x28, 0xa1, 0xac, 0x55, 0x0c, + 0x68, 0x02, 0x4d, 0xcd, 0xd9, 0x39, 0x76, 0x66, 0xf2, 0x68, 0x2c, 0xde, + 0xc4, 0xb9, 0x8b, 0x5e, 0x52, 0x9f, 0x51, 0x55, 0xbd, 0x03, 0x55, 0x8e, + 0xb0, 0x04, 0xa3, 0x78, 0x90, 0x70, 0xa4, 0x5a, 0xc9, 0xf1, 0xd7, 0xae, + 0x54, 0x00, 0x00, 0x00, 0x80, 0x2e, 0xff, 0x37, 0x56, 0xd6, 0xac, 0x5d, + 0x51, 0x57, 0x74, 0x6f, 0xbb, 0x23, 0x58, 0x51, 0x59, 0xa6, 0x9d, 0xe5, + 0xe8, 0x78, 0x0e, 0x5e, 0x56, 0x69, 0x68, 0xe6, 0x1c, 0x84, 0x8a, 0x32, + 0x6e, 0xce, 0xe7, 0xd9, 0xd9, 0x18, 0xc2, 0x6b, 0xe7, 0xf6, 0x09, 0x4d, + 0x70, 0xb0, 0xaf, 0x46, 0xa8, 0x99, 0xa6, 0x6d, 0xa8, 0x11, 0x92, 0x20, + 0xd5, 0xdc, 0xbc, 0x45, 0xa1, 0xf9, 0x6f, 0xb0, 0x2a, 0x63, 0x68, 0x6e, + 0xc2, 0x6b, 0xfe, 0x84, 0xb6, 0x75, 0x5e, 0x45, 0x05, 0xc9, 0x2d, 0x80, + 0x7b, 0x60, 0x0a, 0xbb, 0x1e, 0x6b, 0xb2, 0x59, 0xa2, 0x2a, 0x6b, 0xe0, + 0x95, 0xb0, 0xd8, 0xd1, 0x19, 0xe0, 0xd5, 0x68, 0xf7, 0xaf, 0x7f, 0xcd, + 0xd0, 0xa7, 0x0a, 0x91, 0xed, 0x60, 0xd6, 0x88, 0x44, 0x51, 0xa4, 0x1f, + 0x5e, 0x7c, 0xfb, 0x55, 0x5c, 0x76, 0x62, 0x85, 0x71, 0x85, 0x3d, 0x5d, + 0x4d, + ]), + publicOrig: new Buffer([ + 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x20, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x6b, 0x63, 0x33, 0x4d, 0x41, + 0x41, 0x41, 0x43, 0x42, 0x41, 0x49, 0x4c, 0x43, 0x61, 0x4e, 0x35, 0x51, + 0x62, 0x61, 0x45, 0x72, 0x4a, 0x42, 0x61, 0x30, 0x6e, 0x4e, 0x6f, 0x43, + 0x59, 0x32, 0x51, 0x76, 0x76, 0x67, 0x63, 0x53, 0x64, 0x37, 0x57, 0x4d, + 0x67, 0x49, 0x67, 0x51, 0x32, 0x71, 0x50, 0x41, 0x50, 0x48, 0x33, 0x53, + 0x61, 0x76, 0x2b, 0x39, 0x53, 0x57, 
0x5a, 0x42, 0x30, 0x53, 0x6c, 0x48, + 0x50, 0x38, 0x51, 0x59, 0x6d, 0x56, 0x2f, 0x6e, 0x74, 0x42, 0x75, 0x66, + 0x61, 0x52, 0x77, 0x39, 0x5a, 0x64, 0x49, 0x6f, 0x70, 0x36, 0x65, 0x73, + 0x5a, 0x42, 0x6f, 0x58, 0x6d, 0x4e, 0x58, 0x44, 0x78, 0x6a, 0x63, 0x66, + 0x77, 0x70, 0x4a, 0x41, 0x7a, 0x72, 0x54, 0x33, 0x71, 0x4f, 0x4d, 0x66, + 0x48, 0x58, 0x65, 0x4e, 0x63, 0x63, 0x37, 0x77, 0x2f, 0x36, 0x64, 0x48, + 0x70, 0x2b, 0x32, 0x44, 0x6c, 0x6b, 0x57, 0x44, 0x33, 0x79, 0x7a, 0x4b, + 0x30, 0x37, 0x53, 0x68, 0x65, 0x49, 0x5a, 0x6e, 0x47, 0x4e, 0x74, 0x46, + 0x31, 0x66, 0x70, 0x66, 0x72, 0x55, 0x76, 0x6b, 0x48, 0x46, 0x41, 0x38, + 0x55, 0x58, 0x39, 0x36, 0x61, 0x77, 0x57, 0x52, 0x4c, 0x6e, 0x59, 0x75, + 0x53, 0x76, 0x57, 0x76, 0x41, 0x4b, 0x39, 0x64, 0x41, 0x41, 0x41, 0x41, + 0x46, 0x51, 0x43, 0x72, 0x66, 0x65, 0x61, 0x64, 0x4f, 0x30, 0x74, 0x43, + 0x43, 0x77, 0x67, 0x6e, 0x71, 0x39, 0x37, 0x53, 0x6b, 0x6b, 0x30, 0x4e, + 0x67, 0x2f, 0x78, 0x56, 0x36, 0x77, 0x41, 0x41, 0x41, 0x49, 0x42, 0x63, + 0x56, 0x59, 0x61, 0x4d, 0x6b, 0x6a, 0x58, 0x4e, 0x67, 0x66, 0x76, 0x63, + 0x55, 0x53, 0x34, 0x70, 0x77, 0x4d, 0x61, 0x62, 0x59, 0x38, 0x72, 0x46, + 0x4e, 0x47, 0x62, 0x59, 0x6f, 0x39, 0x4a, 0x67, 0x76, 0x37, 0x49, 0x55, + 0x32, 0x4c, 0x47, 0x46, 0x45, 0x7a, 0x44, 0x36, 0x79, 0x46, 0x62, 0x70, + 0x32, 0x74, 0x31, 0x73, 0x51, 0x77, 0x30, 0x33, 0x31, 0x45, 0x59, 0x6c, + 0x61, 0x64, 0x34, 0x6e, 0x33, 0x42, 0x4d, 0x54, 0x72, 0x5a, 0x64, 0x45, + 0x36, 0x4b, 0x32, 0x2f, 0x53, 0x4d, 0x69, 0x33, 0x66, 0x33, 0x79, 0x52, + 0x72, 0x30, 0x5a, 0x36, 0x53, 0x36, 0x6e, 0x56, 0x53, 0x62, 0x6f, 0x37, + 0x2f, 0x6b, 0x49, 0x54, 0x38, 0x2f, 0x74, 0x47, 0x4a, 0x41, 0x6a, 0x39, + 0x2f, 0x38, 0x52, 0x48, 0x64, 0x43, 0x69, 0x68, 0x72, 0x46, 0x55, 0x4d, + 0x61, 0x41, 0x4a, 0x4e, 0x7a, 0x64, 0x6b, 0x35, 0x64, 0x6d, 0x62, 0x79, + 0x61, 0x43, 0x7a, 0x65, 0x78, 0x4c, 0x6d, 0x4c, 0x58, 0x6c, 0x4b, 0x66, + 0x55, 0x56, 0x57, 0x39, 0x41, 0x31, 0x57, 0x4f, 0x73, 0x41, 0x53, 0x6a, + 0x65, 0x4a, 0x42, 0x77, 0x70, 0x46, 
0x72, 0x4a, 0x38, 0x64, 0x65, 0x75, + 0x56, 0x41, 0x41, 0x41, 0x41, 0x49, 0x41, 0x75, 0x2f, 0x7a, 0x64, 0x57, + 0x31, 0x71, 0x78, 0x64, 0x55, 0x56, 0x64, 0x30, 0x62, 0x37, 0x73, 0x6a, + 0x57, 0x46, 0x46, 0x5a, 0x70, 0x70, 0x33, 0x6c, 0x36, 0x48, 0x67, 0x4f, + 0x58, 0x6c, 0x5a, 0x70, 0x61, 0x4f, 0x59, 0x63, 0x68, 0x49, 0x6f, 0x79, + 0x62, 0x73, 0x37, 0x6e, 0x32, 0x64, 0x6b, 0x59, 0x77, 0x6d, 0x76, 0x6e, + 0x39, 0x67, 0x6c, 0x4e, 0x63, 0x4c, 0x43, 0x76, 0x52, 0x71, 0x69, 0x5a, + 0x70, 0x6d, 0x32, 0x6f, 0x45, 0x5a, 0x49, 0x67, 0x31, 0x64, 0x79, 0x38, + 0x52, 0x61, 0x48, 0x35, 0x62, 0x37, 0x41, 0x71, 0x59, 0x32, 0x68, 0x75, + 0x77, 0x6d, 0x76, 0x2b, 0x68, 0x4c, 0x5a, 0x31, 0x58, 0x6b, 0x55, 0x46, + 0x79, 0x53, 0x32, 0x41, 0x65, 0x32, 0x41, 0x4b, 0x75, 0x78, 0x35, 0x72, + 0x73, 0x6c, 0x6d, 0x69, 0x4b, 0x6d, 0x76, 0x67, 0x6c, 0x62, 0x44, 0x59, + 0x30, 0x52, 0x6e, 0x67, 0x31, 0x57, 0x6a, 0x33, 0x72, 0x33, 0x2f, 0x4e, + 0x30, 0x4b, 0x63, 0x4b, 0x6b, 0x65, 0x31, 0x67, 0x31, 0x6f, 0x68, 0x45, + 0x55, 0x61, 0x51, 0x66, 0x58, 0x6e, 0x7a, 0x37, 0x56, 0x56, 0x78, 0x32, + 0x59, 0x6f, 0x56, 0x78, 0x68, 0x54, 0x31, 0x64, 0x54, 0x51, 0x3d, 0x3d, + 0x20, 0x73, 0x73, 0x68, 0x32, 0x74, 0x65, 0x73, 0x74, + ]) + }, + what: 'DSA public key (OpenSSH format) with comment (no spaces)' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'Comment: "testing ssh2 from node.js"', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: 'testing ssh2 from node.js', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 
0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, + 0x67, 0x20, 0x73, 0x73, 0x68, 0x32, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, + 0x6e, 0x6f, 0x64, 0x65, 0x2e, 0x6a, 0x73, 0x22, 0x0a, 0x41, 0x41, 0x41, + 0x41, 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, + 0x41, 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, + 0x41, 0x59, 0x51, 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, 0x67, + 0x78, 0x77, 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, 0x41, + 0x72, 0x62, 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, 0x6c, + 0x66, 0x2b, 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, 0x52, 0x35, + 0x4f, 0x36, 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, 0x2b, + 0x65, 0x4f, 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, 0x7a, + 0x57, 0x6f, 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, 0x39, + 0x72, 0x65, 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, 0x46, + 0x76, 0x78, 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, 0x50, + 0x52, 0x36, 0x68, 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, 0x4c, 0x53, + 0x46, 0x33, 0x6c, 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, 0x72, + 0x53, 0x30, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 
0x45, 0x4e, 0x44, + 0x20, 0x53, 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, + 0x20, 0x4b, 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with quoted comment' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'Comment: testing ssh2 from node.js', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: 'testing ssh2 from node.js', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, + 0x20, 0x73, 0x73, 0x68, 0x32, 0x20, 0x66, 0x72, 0x6f, 0x6d, 0x20, 0x6e, + 0x6f, 0x64, 0x65, 0x2e, 0x6a, 0x73, 0x0a, 0x41, 0x41, 0x41, 0x41, 0x42, + 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, 0x41, 0x41, + 0x41, 
0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, 0x41, 0x59, + 0x51, 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, 0x67, 0x78, 0x77, + 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, 0x41, 0x72, 0x62, + 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, 0x6c, 0x66, 0x2b, + 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, 0x52, 0x35, 0x4f, 0x36, + 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, 0x2b, 0x65, 0x4f, + 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, 0x7a, 0x57, 0x6f, + 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, 0x39, 0x72, 0x65, + 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, 0x46, 0x76, 0x78, + 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, 0x50, 0x52, 0x36, + 0x68, 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, 0x4c, 0x53, 0x46, 0x33, + 0x6c, 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, 0x72, 0x53, 0x30, + 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x45, 0x4e, 0x44, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with unquoted comment' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'Comment: "testing ssh2 \\', + 'from node.js"', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: 'testing ssh2 from node.js', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 
0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x22, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, + 0x67, 0x20, 0x73, 0x73, 0x68, 0x32, 0x20, 0x5c, 0x0a, 0x66, 0x72, 0x6f, + 0x6d, 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x2e, 0x6a, 0x73, 0x22, 0x0a, 0x41, + 0x41, 0x41, 0x41, 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, + 0x32, 0x45, 0x41, 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, + 0x41, 0x41, 0x41, 0x59, 0x51, 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, + 0x68, 0x67, 0x78, 0x77, 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, + 0x76, 0x41, 0x72, 0x62, 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, + 0x63, 0x6c, 0x66, 0x2b, 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, + 0x52, 0x35, 0x4f, 0x36, 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, + 0x4f, 0x2b, 0x65, 0x4f, 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, + 0x5a, 0x7a, 0x57, 0x6f, 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, + 0x69, 0x39, 0x72, 0x65, 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, + 0x57, 0x46, 0x76, 0x78, 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, + 0x55, 0x50, 0x52, 0x36, 0x68, 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, + 0x4c, 0x53, 0x46, 0x33, 0x6c, 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, + 0x2f, 0x72, 0x53, 0x30, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x45, + 0x4e, 0x44, 0x20, 0x53, 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, + 0x49, 0x43, 0x20, 0x4b, 0x45, 
0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with multi-line quoted comment' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'Comment: testing ssh2 \\', + 'from node.js', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: 'testing ssh2 from node.js', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x74, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, + 0x20, 0x73, 0x73, 0x68, 0x32, 0x20, 0x5c, 0x0a, 0x66, 0x72, 0x6f, 0x6d, + 0x20, 0x6e, 0x6f, 0x64, 0x65, 0x2e, 0x6a, 0x73, 0x0a, 0x41, 0x41, 0x41, + 0x41, 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, + 0x41, 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, + 0x41, 0x59, 0x51, 0x44, 
0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, 0x67, + 0x78, 0x77, 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, 0x41, + 0x72, 0x62, 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, 0x6c, + 0x66, 0x2b, 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, 0x52, 0x35, + 0x4f, 0x36, 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, 0x2b, + 0x65, 0x4f, 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, 0x7a, + 0x57, 0x6f, 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, 0x39, + 0x72, 0x65, 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, 0x46, + 0x76, 0x78, 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, 0x50, + 0x52, 0x36, 0x68, 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, 0x4c, 0x53, + 0x46, 0x33, 0x6c, 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, 0x72, + 0x53, 0x30, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x45, 0x4e, 0x44, + 0x20, 0x53, 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, + 0x20, 0x4b, 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with multi-line unquoted comment' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'Comment: ""', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: '', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 
0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x22, 0x22, 0x0a, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, 0x41, + 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, 0x41, + 0x59, 0x51, 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, 0x67, 0x78, + 0x77, 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, 0x41, 0x72, + 0x62, 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, 0x6c, 0x66, + 0x2b, 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, 0x52, 0x35, 0x4f, + 0x36, 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, 0x2b, 0x65, + 0x4f, 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, 0x7a, 0x57, + 0x6f, 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, 0x39, 0x72, + 0x65, 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, 0x46, 0x76, + 0x78, 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, 0x50, 0x52, + 0x36, 0x68, 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, 0x4c, 0x53, 0x46, + 0x33, 0x6c, 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, 0x72, 0x53, + 0x30, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x45, 0x4e, 0x44, 0x20, + 0x53, 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, + 0x4b, 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with empty quoted comment' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'Comment: ', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 
'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: '', + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x3a, 0x20, 0x0a, 0x41, 0x41, 0x41, 0x41, 0x42, 0x33, + 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, 0x41, 0x41, 0x41, + 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, 0x41, 0x59, 0x51, + 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, 0x67, 0x78, 0x77, 0x47, + 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, 0x41, 0x72, 0x62, 0x4e, + 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, 0x6c, 0x66, 0x2b, 0x6f, + 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, 0x52, 0x35, 0x4f, 0x36, 0x72, + 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, 0x2b, 0x65, 0x4f, 0x63, + 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, 0x7a, 0x57, 0x6f, 0x2f, + 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, 0x39, 
0x72, 0x65, 0x54, + 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, 0x46, 0x76, 0x78, 0x34, + 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, 0x50, 0x52, 0x36, 0x68, + 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, 0x4c, 0x53, 0x46, 0x33, 0x6c, + 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, 0x72, 0x53, 0x30, 0x3d, + 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x45, 0x4e, 0x44, 0x20, 0x53, 0x53, + 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, + 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with empty unquoted comment' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 0x47, 0x93, 0xba, 0xaf, 0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x41, 0x41, 0x41, 0x41, + 
0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, 0x45, 0x41, + 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, 0x41, 0x41, + 0x59, 0x51, 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, 0x67, 0x78, + 0x77, 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, 0x41, 0x72, + 0x62, 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, 0x6c, 0x66, + 0x2b, 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, 0x52, 0x35, 0x4f, + 0x36, 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, 0x2b, 0x65, + 0x4f, 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, 0x7a, 0x57, + 0x6f, 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, 0x39, 0x72, + 0x65, 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, 0x46, 0x76, + 0x78, 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, 0x50, 0x52, + 0x36, 0x68, 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, 0x4c, 0x53, 0x46, + 0x33, 0x6c, 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, 0x72, 0x53, + 0x30, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x45, 0x4e, 0x44, 0x20, + 0x53, 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, + 0x4b, 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with no comment' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + '', + '', + 'AAAAB3NzaC1yc2EAAAADAQABAAAAYQDl6dxL+hgxwGNhZrq18vArbNB0UqNxvclf+ociRL', + 'lnR5O6r/czAhySO+eOc5SF7wexZzWo/COQ37tEi9reTucYFNQQWFvx4E8CRFP+UPR6hQX0', + 'fxoLSF3lxQFJ+32/rS0=', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x03, 0x01, 0x00, 0x01, 0x00, 0x00, 0x00, 0x61, 0x00, + 0xe5, 0xe9, 0xdc, 0x4b, 0xfa, 0x18, 0x31, 0xc0, 0x63, 0x61, 0x66, 0xba, + 0xb5, 0xf2, 0xf0, 0x2b, 0x6c, 0xd0, 0x74, 0x52, 0xa3, 0x71, 0xbd, 0xc9, + 0x5f, 0xfa, 0x87, 0x22, 0x44, 0xb9, 0x67, 
0x47, 0x93, 0xba, 0xaf, 0xf7, + 0x33, 0x02, 0x1c, 0x92, 0x3b, 0xe7, 0x8e, 0x73, 0x94, 0x85, 0xef, 0x07, + 0xb1, 0x67, 0x35, 0xa8, 0xfc, 0x23, 0x90, 0xdf, 0xbb, 0x44, 0x8b, 0xda, + 0xde, 0x4e, 0xe7, 0x18, 0x14, 0xd4, 0x10, 0x58, 0x5b, 0xf1, 0xe0, 0x4f, + 0x02, 0x44, 0x53, 0xfe, 0x50, 0xf4, 0x7a, 0x85, 0x05, 0xf4, 0x7f, 0x1a, + 0x0b, 0x48, 0x5d, 0xe5, 0xc5, 0x01, 0x49, 0xfb, 0x7d, 0xbf, 0xad, 0x2d, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x0a, 0x0a, 0x41, 0x41, + 0x41, 0x41, 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x79, 0x63, 0x32, + 0x45, 0x41, 0x41, 0x41, 0x41, 0x44, 0x41, 0x51, 0x41, 0x42, 0x41, 0x41, + 0x41, 0x41, 0x59, 0x51, 0x44, 0x6c, 0x36, 0x64, 0x78, 0x4c, 0x2b, 0x68, + 0x67, 0x78, 0x77, 0x47, 0x4e, 0x68, 0x5a, 0x72, 0x71, 0x31, 0x38, 0x76, + 0x41, 0x72, 0x62, 0x4e, 0x42, 0x30, 0x55, 0x71, 0x4e, 0x78, 0x76, 0x63, + 0x6c, 0x66, 0x2b, 0x6f, 0x63, 0x69, 0x52, 0x4c, 0x0a, 0x6c, 0x6e, 0x52, + 0x35, 0x4f, 0x36, 0x72, 0x2f, 0x63, 0x7a, 0x41, 0x68, 0x79, 0x53, 0x4f, + 0x2b, 0x65, 0x4f, 0x63, 0x35, 0x53, 0x46, 0x37, 0x77, 0x65, 0x78, 0x5a, + 0x7a, 0x57, 0x6f, 0x2f, 0x43, 0x4f, 0x51, 0x33, 0x37, 0x74, 0x45, 0x69, + 0x39, 0x72, 0x65, 0x54, 0x75, 0x63, 0x59, 0x46, 0x4e, 0x51, 0x51, 0x57, + 0x46, 0x76, 0x78, 0x34, 0x45, 0x38, 0x43, 0x52, 0x46, 0x50, 0x2b, 0x55, + 0x50, 0x52, 0x36, 0x68, 0x51, 0x58, 0x30, 0x0a, 0x66, 0x78, 0x6f, 0x4c, + 0x53, 0x46, 0x33, 0x6c, 0x78, 0x51, 0x46, 0x4a, 0x2b, 0x33, 0x32, 0x2f, + 0x72, 0x53, 0x30, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x45, 0x4e, + 0x44, 0x20, 0x53, 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, + 0x43, 0x20, 0x4b, 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, + ]) + }, + what: 'RSA public key (RFC4716 format) with blank lines' + }, + { source: [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 
'AAAAB3NzaC1kc3MAAACBAI4CR+tgz/kZnbmKJjccZNWKqcifcxLy/pTMnRLh5SrzQ44R8T', + 'QSbQTNnJHVpc6ucW369+TtQRpRmr4iU65ttZkZqCJ1jawXAnxcpIrVq/xBfsXfCIr26LaG', + 'qNm//vYBAPs5j648Fhg6AHyM/LewPSnek3fE8gIB9gtCsPei5L7lAAAAFQDJ3k6TdJK/eo', + 'LgUqg2bUB94Mjg9wAAAIAp9Q3SyooklLGMPdG2kj0vMWF+cVtChcjGjemhPeHXyjD5/in8', + 'gWYFifVqoYKBJhAaPP5HpPtA3BEGgIkLVGRzCk3tnwXbutGbwt7PzutbJ9LKiiR7z1HLqn', + 'E+r0//6Nhqz/ZNQPWZRbcG0l1EIQNgjuZkSzJLLO15TSPZeVhZcQAAAIAUAYobyKQ3Rl4q', + 'XAcOP8uOt5q7YHMy43JmZJnTCQ7X95TuNv5kFj7a5RkDU9HjgzSg5aabVsJAD6wDy5/rm+', + 'kJA9uXGOvHfiToh+tRjDp2SrWDERfPCRDl8oTJr36p1zqFXERwTJsNJF32QClszhzk7NYv', + 'G6kcaQC8Hhm9yccRoA==', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'), + expected: { + fulltype: 'ssh-dss', + type: 'dss', + curve: undefined, + extra: undefined, + comment: undefined, + encryption: undefined, + private: undefined, + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x81, 0x00, 0x8e, 0x02, 0x47, 0xeb, 0x60, 0xcf, 0xf9, + 0x19, 0x9d, 0xb9, 0x8a, 0x26, 0x37, 0x1c, 0x64, 0xd5, 0x8a, 0xa9, 0xc8, + 0x9f, 0x73, 0x12, 0xf2, 0xfe, 0x94, 0xcc, 0x9d, 0x12, 0xe1, 0xe5, 0x2a, + 0xf3, 0x43, 0x8e, 0x11, 0xf1, 0x34, 0x12, 0x6d, 0x04, 0xcd, 0x9c, 0x91, + 0xd5, 0xa5, 0xce, 0xae, 0x71, 0x6d, 0xfa, 0xf7, 0xe4, 0xed, 0x41, 0x1a, + 0x51, 0x9a, 0xbe, 0x22, 0x53, 0xae, 0x6d, 0xb5, 0x99, 0x19, 0xa8, 0x22, + 0x75, 0x8d, 0xac, 0x17, 0x02, 0x7c, 0x5c, 0xa4, 0x8a, 0xd5, 0xab, 0xfc, + 0x41, 0x7e, 0xc5, 0xdf, 0x08, 0x8a, 0xf6, 0xe8, 0xb6, 0x86, 0xa8, 0xd9, + 0xbf, 0xfe, 0xf6, 0x01, 0x00, 0xfb, 0x39, 0x8f, 0xae, 0x3c, 0x16, 0x18, + 0x3a, 0x00, 0x7c, 0x8c, 0xfc, 0xb7, 0xb0, 0x3d, 0x29, 0xde, 0x93, 0x77, + 0xc4, 0xf2, 0x02, 0x01, 0xf6, 0x0b, 0x42, 0xb0, 0xf7, 0xa2, 0xe4, 0xbe, + 0xe5, 0x00, 0x00, 0x00, 0x15, 0x00, 0xc9, 0xde, 0x4e, 0x93, 0x74, 0x92, + 0xbf, 0x7a, 0x82, 0xe0, 0x52, 0xa8, 0x36, 0x6d, 0x40, 0x7d, 0xe0, 0xc8, + 0xe0, 0xf7, 0x00, 0x00, 0x00, 0x80, 0x29, 0xf5, 0x0d, 0xd2, 0xca, 0x8a, + 0x24, 0x94, 0xb1, 0x8c, 0x3d, 0xd1, 0xb6, 0x92, 0x3d, 0x2f, 0x31, 
0x61, + 0x7e, 0x71, 0x5b, 0x42, 0x85, 0xc8, 0xc6, 0x8d, 0xe9, 0xa1, 0x3d, 0xe1, + 0xd7, 0xca, 0x30, 0xf9, 0xfe, 0x29, 0xfc, 0x81, 0x66, 0x05, 0x89, 0xf5, + 0x6a, 0xa1, 0x82, 0x81, 0x26, 0x10, 0x1a, 0x3c, 0xfe, 0x47, 0xa4, 0xfb, + 0x40, 0xdc, 0x11, 0x06, 0x80, 0x89, 0x0b, 0x54, 0x64, 0x73, 0x0a, 0x4d, + 0xed, 0x9f, 0x05, 0xdb, 0xba, 0xd1, 0x9b, 0xc2, 0xde, 0xcf, 0xce, 0xeb, + 0x5b, 0x27, 0xd2, 0xca, 0x8a, 0x24, 0x7b, 0xcf, 0x51, 0xcb, 0xaa, 0x71, + 0x3e, 0xaf, 0x4f, 0xff, 0xe8, 0xd8, 0x6a, 0xcf, 0xf6, 0x4d, 0x40, 0xf5, + 0x99, 0x45, 0xb7, 0x06, 0xd2, 0x5d, 0x44, 0x21, 0x03, 0x60, 0x8e, 0xe6, + 0x64, 0x4b, 0x32, 0x4b, 0x2c, 0xed, 0x79, 0x4d, 0x23, 0xd9, 0x79, 0x58, + 0x59, 0x71, 0x00, 0x00, 0x00, 0x80, 0x14, 0x01, 0x8a, 0x1b, 0xc8, 0xa4, + 0x37, 0x46, 0x5e, 0x2a, 0x5c, 0x07, 0x0e, 0x3f, 0xcb, 0x8e, 0xb7, 0x9a, + 0xbb, 0x60, 0x73, 0x32, 0xe3, 0x72, 0x66, 0x64, 0x99, 0xd3, 0x09, 0x0e, + 0xd7, 0xf7, 0x94, 0xee, 0x36, 0xfe, 0x64, 0x16, 0x3e, 0xda, 0xe5, 0x19, + 0x03, 0x53, 0xd1, 0xe3, 0x83, 0x34, 0xa0, 0xe5, 0xa6, 0x9b, 0x56, 0xc2, + 0x40, 0x0f, 0xac, 0x03, 0xcb, 0x9f, 0xeb, 0x9b, 0xe9, 0x09, 0x03, 0xdb, + 0x97, 0x18, 0xeb, 0xc7, 0x7e, 0x24, 0xe8, 0x87, 0xeb, 0x51, 0x8c, 0x3a, + 0x76, 0x4a, 0xb5, 0x83, 0x11, 0x17, 0xcf, 0x09, 0x10, 0xe5, 0xf2, 0x84, + 0xc9, 0xaf, 0x7e, 0xa9, 0xd7, 0x3a, 0x85, 0x5c, 0x44, 0x70, 0x4c, 0x9b, + 0x0d, 0x24, 0x5d, 0xf6, 0x40, 0x29, 0x6c, 0xce, 0x1c, 0xe4, 0xec, 0xd6, + 0x2f, 0x1b, 0xa9, 0x1c, 0x69, 0x00, 0xbc, 0x1e, 0x19, 0xbd, 0xc9, 0xc7, + 0x11, 0xa0, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x20, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x53, + 0x53, 0x48, 0x32, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, + 0x45, 0x59, 0x20, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x41, 0x41, 0x41, 0x41, + 0x42, 0x33, 0x4e, 0x7a, 0x61, 0x43, 0x31, 0x6b, 0x63, 0x33, 0x4d, 0x41, + 0x41, 0x41, 0x43, 0x42, 0x41, 0x49, 0x34, 0x43, 0x52, 0x2b, 0x74, 0x67, + 0x7a, 0x2f, 0x6b, 0x5a, 0x6e, 0x62, 0x6d, 0x4b, 0x4a, 0x6a, 0x63, 0x63, + 0x5a, 0x4e, 0x57, 
0x4b, 0x71, 0x63, 0x69, 0x66, 0x63, 0x78, 0x4c, 0x79, + 0x2f, 0x70, 0x54, 0x4d, 0x6e, 0x52, 0x4c, 0x68, 0x35, 0x53, 0x72, 0x7a, + 0x51, 0x34, 0x34, 0x52, 0x38, 0x54, 0x0a, 0x51, 0x53, 0x62, 0x51, 0x54, + 0x4e, 0x6e, 0x4a, 0x48, 0x56, 0x70, 0x63, 0x36, 0x75, 0x63, 0x57, 0x33, + 0x36, 0x39, 0x2b, 0x54, 0x74, 0x51, 0x52, 0x70, 0x52, 0x6d, 0x72, 0x34, + 0x69, 0x55, 0x36, 0x35, 0x74, 0x74, 0x5a, 0x6b, 0x5a, 0x71, 0x43, 0x4a, + 0x31, 0x6a, 0x61, 0x77, 0x58, 0x41, 0x6e, 0x78, 0x63, 0x70, 0x49, 0x72, + 0x56, 0x71, 0x2f, 0x78, 0x42, 0x66, 0x73, 0x58, 0x66, 0x43, 0x49, 0x72, + 0x32, 0x36, 0x4c, 0x61, 0x47, 0x0a, 0x71, 0x4e, 0x6d, 0x2f, 0x2f, 0x76, + 0x59, 0x42, 0x41, 0x50, 0x73, 0x35, 0x6a, 0x36, 0x34, 0x38, 0x46, 0x68, + 0x67, 0x36, 0x41, 0x48, 0x79, 0x4d, 0x2f, 0x4c, 0x65, 0x77, 0x50, 0x53, + 0x6e, 0x65, 0x6b, 0x33, 0x66, 0x45, 0x38, 0x67, 0x49, 0x42, 0x39, 0x67, + 0x74, 0x43, 0x73, 0x50, 0x65, 0x69, 0x35, 0x4c, 0x37, 0x6c, 0x41, 0x41, + 0x41, 0x41, 0x46, 0x51, 0x44, 0x4a, 0x33, 0x6b, 0x36, 0x54, 0x64, 0x4a, + 0x4b, 0x2f, 0x65, 0x6f, 0x0a, 0x4c, 0x67, 0x55, 0x71, 0x67, 0x32, 0x62, + 0x55, 0x42, 0x39, 0x34, 0x4d, 0x6a, 0x67, 0x39, 0x77, 0x41, 0x41, 0x41, + 0x49, 0x41, 0x70, 0x39, 0x51, 0x33, 0x53, 0x79, 0x6f, 0x6f, 0x6b, 0x6c, + 0x4c, 0x47, 0x4d, 0x50, 0x64, 0x47, 0x32, 0x6b, 0x6a, 0x30, 0x76, 0x4d, + 0x57, 0x46, 0x2b, 0x63, 0x56, 0x74, 0x43, 0x68, 0x63, 0x6a, 0x47, 0x6a, + 0x65, 0x6d, 0x68, 0x50, 0x65, 0x48, 0x58, 0x79, 0x6a, 0x44, 0x35, 0x2f, + 0x69, 0x6e, 0x38, 0x0a, 0x67, 0x57, 0x59, 0x46, 0x69, 0x66, 0x56, 0x71, + 0x6f, 0x59, 0x4b, 0x42, 0x4a, 0x68, 0x41, 0x61, 0x50, 0x50, 0x35, 0x48, + 0x70, 0x50, 0x74, 0x41, 0x33, 0x42, 0x45, 0x47, 0x67, 0x49, 0x6b, 0x4c, + 0x56, 0x47, 0x52, 0x7a, 0x43, 0x6b, 0x33, 0x74, 0x6e, 0x77, 0x58, 0x62, + 0x75, 0x74, 0x47, 0x62, 0x77, 0x74, 0x37, 0x50, 0x7a, 0x75, 0x74, 0x62, + 0x4a, 0x39, 0x4c, 0x4b, 0x69, 0x69, 0x52, 0x37, 0x7a, 0x31, 0x48, 0x4c, + 0x71, 0x6e, 0x0a, 0x45, 0x2b, 0x72, 0x30, 0x2f, 0x2f, 0x36, 0x4e, 0x68, + 0x71, 0x7a, 0x2f, 
0x5a, 0x4e, 0x51, 0x50, 0x57, 0x5a, 0x52, 0x62, 0x63, + 0x47, 0x30, 0x6c, 0x31, 0x45, 0x49, 0x51, 0x4e, 0x67, 0x6a, 0x75, 0x5a, + 0x6b, 0x53, 0x7a, 0x4a, 0x4c, 0x4c, 0x4f, 0x31, 0x35, 0x54, 0x53, 0x50, + 0x5a, 0x65, 0x56, 0x68, 0x5a, 0x63, 0x51, 0x41, 0x41, 0x41, 0x49, 0x41, + 0x55, 0x41, 0x59, 0x6f, 0x62, 0x79, 0x4b, 0x51, 0x33, 0x52, 0x6c, 0x34, + 0x71, 0x0a, 0x58, 0x41, 0x63, 0x4f, 0x50, 0x38, 0x75, 0x4f, 0x74, 0x35, + 0x71, 0x37, 0x59, 0x48, 0x4d, 0x79, 0x34, 0x33, 0x4a, 0x6d, 0x5a, 0x4a, + 0x6e, 0x54, 0x43, 0x51, 0x37, 0x58, 0x39, 0x35, 0x54, 0x75, 0x4e, 0x76, + 0x35, 0x6b, 0x46, 0x6a, 0x37, 0x61, 0x35, 0x52, 0x6b, 0x44, 0x55, 0x39, + 0x48, 0x6a, 0x67, 0x7a, 0x53, 0x67, 0x35, 0x61, 0x61, 0x62, 0x56, 0x73, + 0x4a, 0x41, 0x44, 0x36, 0x77, 0x44, 0x79, 0x35, 0x2f, 0x72, 0x6d, 0x2b, + 0x0a, 0x6b, 0x4a, 0x41, 0x39, 0x75, 0x58, 0x47, 0x4f, 0x76, 0x48, 0x66, + 0x69, 0x54, 0x6f, 0x68, 0x2b, 0x74, 0x52, 0x6a, 0x44, 0x70, 0x32, 0x53, + 0x72, 0x57, 0x44, 0x45, 0x52, 0x66, 0x50, 0x43, 0x52, 0x44, 0x6c, 0x38, + 0x6f, 0x54, 0x4a, 0x72, 0x33, 0x36, 0x70, 0x31, 0x7a, 0x71, 0x46, 0x58, + 0x45, 0x52, 0x77, 0x54, 0x4a, 0x73, 0x4e, 0x4a, 0x46, 0x33, 0x32, 0x51, + 0x43, 0x6c, 0x73, 0x7a, 0x68, 0x7a, 0x6b, 0x37, 0x4e, 0x59, 0x76, 0x0a, + 0x47, 0x36, 0x6b, 0x63, 0x61, 0x51, 0x43, 0x38, 0x48, 0x68, 0x6d, 0x39, + 0x79, 0x63, 0x63, 0x52, 0x6f, 0x41, 0x3d, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, + 0x2d, 0x20, 0x45, 0x4e, 0x44, 0x20, 0x53, 0x53, 0x48, 0x32, 0x20, 0x50, + 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x20, 0x2d, 0x2d, + 0x2d, 0x2d, + ]) + }, + what: 'DSA public key (RFC4716 format) with no comment' + }, + { source: [ + 'PuTTY-User-Key-File-2: ssh-rsa', + 'Encryption: none', + 'Comment: rsa-key-20141119', + 'Public-Lines: 4', + 'AAAAB3NzaC1yc2EAAAABJQAAAIBrBWETAVAyJmuNG53jwTNDlbIcH5lrEvcx6lx5', + 'bM6EKg0XmOIH96VqUjS7eRRTTD9lpBA8hYhkrOjOx93/JWB/pcVN8/B3DYHshT9O', + 'BW1DCkrNwut2pbJ2oZOBirhhAr+xqWFr3551FqbzaCIXpOKubr4EcIwCipBl6PxL', + 'USfHgw==', + 'Private-Lines: 8', + 
'AAAAgFPhnxy71xKM0NZhwPDH3BJgkoS+0jFUDdsDy/B34CJmJe/mh6VhPHXtZ5nb', + 'cMuqduD2Nj1GEPT2Oe4t8y/DWXfFkO0YNEQRw5Z3WhXheBH9Li26KmHf8TdK838u', + 'ec+4Vji/vFYaMU6wVL73joWcAT5Dnh38ZpFc98W264N5DZaNAAAAQQDJYPKtCs/l', + '46KJmN3lUANdI4QIuWQ+Zllz7p94FfdotnkvqG++Bp1wOqJSCih6UViwLfvpNZtG', + 'MCtk46WNhc0zAAAAQQCIDI3hZSz/THhai8b3nonUD65IQp5bO6p+kYZtaBn1d+c+', + 'eT7UCXwU5bW271Zasw8hq9Cdlb91fGGR41ZMfvxxAAAAQFSBjCa/fzeICVkPFBaK', + 'QUmXjQ3IcPTOr90mSAiPnAAppSwTj5SYSfE9rSVb+EhQ0hk2VKWIfocNHBD1MAN9', + 'zb4=', + 'Private-MAC: 0bc5cc619df85b79dfd3ea25f0e59230cf671cd2' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: 'rsa-key-20141119', + encryption: undefined, + private: new Buffer([ + 0x30, 0x82, 0x02, 0x59, 0x02, 0x01, 0x00, 0x02, 0x81, 0x80, 0x6b, 0x05, + 0x61, 0x13, 0x01, 0x50, 0x32, 0x26, 0x6b, 0x8d, 0x1b, 0x9d, 0xe3, 0xc1, + 0x33, 0x43, 0x95, 0xb2, 0x1c, 0x1f, 0x99, 0x6b, 0x12, 0xf7, 0x31, 0xea, + 0x5c, 0x79, 0x6c, 0xce, 0x84, 0x2a, 0x0d, 0x17, 0x98, 0xe2, 0x07, 0xf7, + 0xa5, 0x6a, 0x52, 0x34, 0xbb, 0x79, 0x14, 0x53, 0x4c, 0x3f, 0x65, 0xa4, + 0x10, 0x3c, 0x85, 0x88, 0x64, 0xac, 0xe8, 0xce, 0xc7, 0xdd, 0xff, 0x25, + 0x60, 0x7f, 0xa5, 0xc5, 0x4d, 0xf3, 0xf0, 0x77, 0x0d, 0x81, 0xec, 0x85, + 0x3f, 0x4e, 0x05, 0x6d, 0x43, 0x0a, 0x4a, 0xcd, 0xc2, 0xeb, 0x76, 0xa5, + 0xb2, 0x76, 0xa1, 0x93, 0x81, 0x8a, 0xb8, 0x61, 0x02, 0xbf, 0xb1, 0xa9, + 0x61, 0x6b, 0xdf, 0x9e, 0x75, 0x16, 0xa6, 0xf3, 0x68, 0x22, 0x17, 0xa4, + 0xe2, 0xae, 0x6e, 0xbe, 0x04, 0x70, 0x8c, 0x02, 0x8a, 0x90, 0x65, 0xe8, + 0xfc, 0x4b, 0x51, 0x27, 0xc7, 0x83, 0x02, 0x01, 0x25, 0x02, 0x81, 0x80, + 0x53, 0xe1, 0x9f, 0x1c, 0xbb, 0xd7, 0x12, 0x8c, 0xd0, 0xd6, 0x61, 0xc0, + 0xf0, 0xc7, 0xdc, 0x12, 0x60, 0x92, 0x84, 0xbe, 0xd2, 0x31, 0x54, 0x0d, + 0xdb, 0x03, 0xcb, 0xf0, 0x77, 0xe0, 0x22, 0x66, 0x25, 0xef, 0xe6, 0x87, + 0xa5, 0x61, 0x3c, 0x75, 0xed, 0x67, 0x99, 0xdb, 0x70, 0xcb, 0xaa, 0x76, + 0xe0, 0xf6, 0x36, 0x3d, 0x46, 0x10, 0xf4, 0xf6, 0x39, 0xee, 0x2d, 
0xf3, + 0x2f, 0xc3, 0x59, 0x77, 0xc5, 0x90, 0xed, 0x18, 0x34, 0x44, 0x11, 0xc3, + 0x96, 0x77, 0x5a, 0x15, 0xe1, 0x78, 0x11, 0xfd, 0x2e, 0x2d, 0xba, 0x2a, + 0x61, 0xdf, 0xf1, 0x37, 0x4a, 0xf3, 0x7f, 0x2e, 0x79, 0xcf, 0xb8, 0x56, + 0x38, 0xbf, 0xbc, 0x56, 0x1a, 0x31, 0x4e, 0xb0, 0x54, 0xbe, 0xf7, 0x8e, + 0x85, 0x9c, 0x01, 0x3e, 0x43, 0x9e, 0x1d, 0xfc, 0x66, 0x91, 0x5c, 0xf7, + 0xc5, 0xb6, 0xeb, 0x83, 0x79, 0x0d, 0x96, 0x8d, 0x02, 0x41, 0x00, 0xc9, + 0x60, 0xf2, 0xad, 0x0a, 0xcf, 0xe5, 0xe3, 0xa2, 0x89, 0x98, 0xdd, 0xe5, + 0x50, 0x03, 0x5d, 0x23, 0x84, 0x08, 0xb9, 0x64, 0x3e, 0x66, 0x59, 0x73, + 0xee, 0x9f, 0x78, 0x15, 0xf7, 0x68, 0xb6, 0x79, 0x2f, 0xa8, 0x6f, 0xbe, + 0x06, 0x9d, 0x70, 0x3a, 0xa2, 0x52, 0x0a, 0x28, 0x7a, 0x51, 0x58, 0xb0, + 0x2d, 0xfb, 0xe9, 0x35, 0x9b, 0x46, 0x30, 0x2b, 0x64, 0xe3, 0xa5, 0x8d, + 0x85, 0xcd, 0x33, 0x02, 0x41, 0x00, 0x88, 0x0c, 0x8d, 0xe1, 0x65, 0x2c, + 0xff, 0x4c, 0x78, 0x5a, 0x8b, 0xc6, 0xf7, 0x9e, 0x89, 0xd4, 0x0f, 0xae, + 0x48, 0x42, 0x9e, 0x5b, 0x3b, 0xaa, 0x7e, 0x91, 0x86, 0x6d, 0x68, 0x19, + 0xf5, 0x77, 0xe7, 0x3e, 0x79, 0x3e, 0xd4, 0x09, 0x7c, 0x14, 0xe5, 0xb5, + 0xb6, 0xef, 0x56, 0x5a, 0xb3, 0x0f, 0x21, 0xab, 0xd0, 0x9d, 0x95, 0xbf, + 0x75, 0x7c, 0x61, 0x91, 0xe3, 0x56, 0x4c, 0x7e, 0xfc, 0x71, 0x02, 0x41, + 0x00, 0xb9, 0x0c, 0xfa, 0xac, 0xd9, 0x80, 0xc5, 0x69, 0x64, 0xed, 0x24, + 0xb0, 0x3a, 0x80, 0xe0, 0x7f, 0x19, 0xb7, 0x99, 0x50, 0x69, 0xf4, 0x26, + 0xb3, 0x10, 0x96, 0x15, 0xff, 0xa5, 0x7b, 0x90, 0xa7, 0xad, 0xa1, 0x6a, + 0x58, 0xd8, 0x21, 0xc1, 0x1b, 0x05, 0x72, 0x90, 0x93, 0xb6, 0x7e, 0x3c, + 0xe9, 0xb6, 0xa6, 0xcb, 0xcf, 0x61, 0xb1, 0x47, 0x6a, 0x88, 0xbd, 0x92, + 0xeb, 0x28, 0x1a, 0x16, 0x81, 0x02, 0x40, 0x50, 0xe4, 0xde, 0xbd, 0x5e, + 0xc0, 0xcf, 0x26, 0x8c, 0xc0, 0x37, 0x6f, 0x62, 0xcc, 0xf8, 0x01, 0x8c, + 0xc8, 0x7d, 0xfe, 0x18, 0xf7, 0xf9, 0xf6, 0xac, 0x1f, 0x2d, 0x55, 0xcf, + 0x32, 0x07, 0x93, 0x66, 0xe6, 0xe0, 0x4e, 0xde, 0xf0, 0xe1, 0xfe, 0x96, + 0x6c, 0x0b, 0xe8, 0x41, 0x2f, 0x02, 0xaf, 0x0d, 0x1a, 0x0d, 0x56, 
0xc7, + 0xbd, 0xf2, 0xd4, 0x55, 0xb0, 0xb0, 0xaf, 0xda, 0x75, 0x04, 0xcd, 0x02, + 0x40, 0x54, 0x81, 0x8c, 0x26, 0xbf, 0x7f, 0x37, 0x88, 0x09, 0x59, 0x0f, + 0x14, 0x16, 0x8a, 0x41, 0x49, 0x97, 0x8d, 0x0d, 0xc8, 0x70, 0xf4, 0xce, + 0xaf, 0xdd, 0x26, 0x48, 0x08, 0x8f, 0x9c, 0x00, 0x29, 0xa5, 0x2c, 0x13, + 0x8f, 0x94, 0x98, 0x49, 0xf1, 0x3d, 0xad, 0x25, 0x5b, 0xf8, 0x48, 0x50, + 0xd2, 0x19, 0x36, 0x54, 0xa5, 0x88, 0x7e, 0x87, 0x0d, 0x1c, 0x10, 0xf5, + 0x30, 0x03, 0x7d, 0xcd, 0xbe, + ]), + privateOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x52, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x43, + 0x57, 0x51, 0x49, 0x42, 0x41, 0x41, 0x4b, 0x42, 0x67, 0x47, 0x73, 0x46, + 0x59, 0x52, 0x4d, 0x42, 0x55, 0x44, 0x49, 0x6d, 0x61, 0x34, 0x30, 0x62, + 0x6e, 0x65, 0x50, 0x42, 0x4d, 0x30, 0x4f, 0x56, 0x73, 0x68, 0x77, 0x66, + 0x6d, 0x57, 0x73, 0x53, 0x39, 0x7a, 0x48, 0x71, 0x58, 0x48, 0x6c, 0x73, + 0x7a, 0x6f, 0x51, 0x71, 0x44, 0x52, 0x65, 0x59, 0x34, 0x67, 0x66, 0x33, + 0x0a, 0x70, 0x57, 0x70, 0x53, 0x4e, 0x4c, 0x74, 0x35, 0x46, 0x46, 0x4e, + 0x4d, 0x50, 0x32, 0x57, 0x6b, 0x45, 0x44, 0x79, 0x46, 0x69, 0x47, 0x53, + 0x73, 0x36, 0x4d, 0x37, 0x48, 0x33, 0x66, 0x38, 0x6c, 0x59, 0x48, 0x2b, + 0x6c, 0x78, 0x55, 0x33, 0x7a, 0x38, 0x48, 0x63, 0x4e, 0x67, 0x65, 0x79, + 0x46, 0x50, 0x30, 0x34, 0x46, 0x62, 0x55, 0x4d, 0x4b, 0x53, 0x73, 0x33, + 0x43, 0x36, 0x33, 0x61, 0x6c, 0x0a, 0x73, 0x6e, 0x61, 0x68, 0x6b, 0x34, + 0x47, 0x4b, 0x75, 0x47, 0x45, 0x43, 0x76, 0x37, 0x47, 0x70, 0x59, 0x57, + 0x76, 0x66, 0x6e, 0x6e, 0x55, 0x57, 0x70, 0x76, 0x4e, 0x6f, 0x49, 0x68, + 0x65, 0x6b, 0x34, 0x71, 0x35, 0x75, 0x76, 0x67, 0x52, 0x77, 0x6a, 0x41, + 0x4b, 0x4b, 0x6b, 0x47, 0x58, 0x6f, 0x2f, 0x45, 0x74, 0x52, 0x4a, 0x38, + 0x65, 0x44, 0x41, 0x67, 0x45, 0x6c, 0x41, 0x6f, 0x47, 0x41, 0x0a, 0x55, + 0x2b, 0x47, 0x66, 0x48, 0x4c, 0x76, 0x58, 0x45, 0x6f, 0x7a, 0x51, 0x31, + 
0x6d, 0x48, 0x41, 0x38, 0x4d, 0x66, 0x63, 0x45, 0x6d, 0x43, 0x53, 0x68, + 0x4c, 0x37, 0x53, 0x4d, 0x56, 0x51, 0x4e, 0x32, 0x77, 0x50, 0x4c, 0x38, + 0x48, 0x66, 0x67, 0x49, 0x6d, 0x59, 0x6c, 0x37, 0x2b, 0x61, 0x48, 0x70, + 0x57, 0x45, 0x38, 0x64, 0x65, 0x31, 0x6e, 0x6d, 0x64, 0x74, 0x77, 0x79, + 0x36, 0x70, 0x32, 0x0a, 0x34, 0x50, 0x59, 0x32, 0x50, 0x55, 0x59, 0x51, + 0x39, 0x50, 0x59, 0x35, 0x37, 0x69, 0x33, 0x7a, 0x4c, 0x38, 0x4e, 0x5a, + 0x64, 0x38, 0x57, 0x51, 0x37, 0x52, 0x67, 0x30, 0x52, 0x42, 0x48, 0x44, + 0x6c, 0x6e, 0x64, 0x61, 0x46, 0x65, 0x46, 0x34, 0x45, 0x66, 0x30, 0x75, + 0x4c, 0x62, 0x6f, 0x71, 0x59, 0x64, 0x2f, 0x78, 0x4e, 0x30, 0x72, 0x7a, + 0x66, 0x79, 0x35, 0x35, 0x7a, 0x37, 0x68, 0x57, 0x0a, 0x4f, 0x4c, 0x2b, + 0x38, 0x56, 0x68, 0x6f, 0x78, 0x54, 0x72, 0x42, 0x55, 0x76, 0x76, 0x65, + 0x4f, 0x68, 0x5a, 0x77, 0x42, 0x50, 0x6b, 0x4f, 0x65, 0x48, 0x66, 0x78, + 0x6d, 0x6b, 0x56, 0x7a, 0x33, 0x78, 0x62, 0x62, 0x72, 0x67, 0x33, 0x6b, + 0x4e, 0x6c, 0x6f, 0x30, 0x43, 0x51, 0x51, 0x44, 0x4a, 0x59, 0x50, 0x4b, + 0x74, 0x43, 0x73, 0x2f, 0x6c, 0x34, 0x36, 0x4b, 0x4a, 0x6d, 0x4e, 0x33, + 0x6c, 0x0a, 0x55, 0x41, 0x4e, 0x64, 0x49, 0x34, 0x51, 0x49, 0x75, 0x57, + 0x51, 0x2b, 0x5a, 0x6c, 0x6c, 0x7a, 0x37, 0x70, 0x39, 0x34, 0x46, 0x66, + 0x64, 0x6f, 0x74, 0x6e, 0x6b, 0x76, 0x71, 0x47, 0x2b, 0x2b, 0x42, 0x70, + 0x31, 0x77, 0x4f, 0x71, 0x4a, 0x53, 0x43, 0x69, 0x68, 0x36, 0x55, 0x56, + 0x69, 0x77, 0x4c, 0x66, 0x76, 0x70, 0x4e, 0x5a, 0x74, 0x47, 0x4d, 0x43, + 0x74, 0x6b, 0x34, 0x36, 0x57, 0x4e, 0x0a, 0x68, 0x63, 0x30, 0x7a, 0x41, + 0x6b, 0x45, 0x41, 0x69, 0x41, 0x79, 0x4e, 0x34, 0x57, 0x55, 0x73, 0x2f, + 0x30, 0x78, 0x34, 0x57, 0x6f, 0x76, 0x47, 0x39, 0x35, 0x36, 0x4a, 0x31, + 0x41, 0x2b, 0x75, 0x53, 0x45, 0x4b, 0x65, 0x57, 0x7a, 0x75, 0x71, 0x66, + 0x70, 0x47, 0x47, 0x62, 0x57, 0x67, 0x5a, 0x39, 0x58, 0x66, 0x6e, 0x50, + 0x6e, 0x6b, 0x2b, 0x31, 0x41, 0x6c, 0x38, 0x46, 0x4f, 0x57, 0x31, 0x0a, + 0x74, 0x75, 0x39, 0x57, 0x57, 0x72, 0x4d, 0x50, 0x49, 0x61, 0x76, 0x51, + 
0x6e, 0x5a, 0x57, 0x2f, 0x64, 0x58, 0x78, 0x68, 0x6b, 0x65, 0x4e, 0x57, + 0x54, 0x48, 0x37, 0x38, 0x63, 0x51, 0x4a, 0x42, 0x41, 0x4c, 0x6b, 0x4d, + 0x2b, 0x71, 0x7a, 0x5a, 0x67, 0x4d, 0x56, 0x70, 0x5a, 0x4f, 0x30, 0x6b, + 0x73, 0x44, 0x71, 0x41, 0x34, 0x48, 0x38, 0x5a, 0x74, 0x35, 0x6c, 0x51, + 0x61, 0x66, 0x51, 0x6d, 0x0a, 0x73, 0x78, 0x43, 0x57, 0x46, 0x66, 0x2b, + 0x6c, 0x65, 0x35, 0x43, 0x6e, 0x72, 0x61, 0x46, 0x71, 0x57, 0x4e, 0x67, + 0x68, 0x77, 0x52, 0x73, 0x46, 0x63, 0x70, 0x43, 0x54, 0x74, 0x6e, 0x34, + 0x38, 0x36, 0x62, 0x61, 0x6d, 0x79, 0x38, 0x39, 0x68, 0x73, 0x55, 0x64, + 0x71, 0x69, 0x4c, 0x32, 0x53, 0x36, 0x79, 0x67, 0x61, 0x46, 0x6f, 0x45, + 0x43, 0x51, 0x46, 0x44, 0x6b, 0x33, 0x72, 0x31, 0x65, 0x0a, 0x77, 0x4d, + 0x38, 0x6d, 0x6a, 0x4d, 0x41, 0x33, 0x62, 0x32, 0x4c, 0x4d, 0x2b, 0x41, + 0x47, 0x4d, 0x79, 0x48, 0x33, 0x2b, 0x47, 0x50, 0x66, 0x35, 0x39, 0x71, + 0x77, 0x66, 0x4c, 0x56, 0x58, 0x50, 0x4d, 0x67, 0x65, 0x54, 0x5a, 0x75, + 0x62, 0x67, 0x54, 0x74, 0x37, 0x77, 0x34, 0x66, 0x36, 0x57, 0x62, 0x41, + 0x76, 0x6f, 0x51, 0x53, 0x38, 0x43, 0x72, 0x77, 0x30, 0x61, 0x44, 0x56, + 0x62, 0x48, 0x0a, 0x76, 0x66, 0x4c, 0x55, 0x56, 0x62, 0x43, 0x77, 0x72, + 0x39, 0x70, 0x31, 0x42, 0x4d, 0x30, 0x43, 0x51, 0x46, 0x53, 0x42, 0x6a, + 0x43, 0x61, 0x2f, 0x66, 0x7a, 0x65, 0x49, 0x43, 0x56, 0x6b, 0x50, 0x46, + 0x42, 0x61, 0x4b, 0x51, 0x55, 0x6d, 0x58, 0x6a, 0x51, 0x33, 0x49, 0x63, + 0x50, 0x54, 0x4f, 0x72, 0x39, 0x30, 0x6d, 0x53, 0x41, 0x69, 0x50, 0x6e, + 0x41, 0x41, 0x70, 0x70, 0x53, 0x77, 0x54, 0x0a, 0x6a, 0x35, 0x53, 0x59, + 0x53, 0x66, 0x45, 0x39, 0x72, 0x53, 0x56, 0x62, 0x2b, 0x45, 0x68, 0x51, + 0x30, 0x68, 0x6b, 0x32, 0x56, 0x4b, 0x57, 0x49, 0x66, 0x6f, 0x63, 0x4e, + 0x48, 0x42, 0x44, 0x31, 0x4d, 0x41, 0x4e, 0x39, 0x7a, 0x62, 0x34, 0x3d, + 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x52, 0x53, + 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, 0x45, + 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + public: new Buffer([ + 0x00, 
0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x72, 0x73, 0x61, 0x00, + 0x00, 0x00, 0x01, 0x25, 0x00, 0x00, 0x00, 0x80, 0x6b, 0x05, 0x61, 0x13, + 0x01, 0x50, 0x32, 0x26, 0x6b, 0x8d, 0x1b, 0x9d, 0xe3, 0xc1, 0x33, 0x43, + 0x95, 0xb2, 0x1c, 0x1f, 0x99, 0x6b, 0x12, 0xf7, 0x31, 0xea, 0x5c, 0x79, + 0x6c, 0xce, 0x84, 0x2a, 0x0d, 0x17, 0x98, 0xe2, 0x07, 0xf7, 0xa5, 0x6a, + 0x52, 0x34, 0xbb, 0x79, 0x14, 0x53, 0x4c, 0x3f, 0x65, 0xa4, 0x10, 0x3c, + 0x85, 0x88, 0x64, 0xac, 0xe8, 0xce, 0xc7, 0xdd, 0xff, 0x25, 0x60, 0x7f, + 0xa5, 0xc5, 0x4d, 0xf3, 0xf0, 0x77, 0x0d, 0x81, 0xec, 0x85, 0x3f, 0x4e, + 0x05, 0x6d, 0x43, 0x0a, 0x4a, 0xcd, 0xc2, 0xeb, 0x76, 0xa5, 0xb2, 0x76, + 0xa1, 0x93, 0x81, 0x8a, 0xb8, 0x61, 0x02, 0xbf, 0xb1, 0xa9, 0x61, 0x6b, + 0xdf, 0x9e, 0x75, 0x16, 0xa6, 0xf3, 0x68, 0x22, 0x17, 0xa4, 0xe2, 0xae, + 0x6e, 0xbe, 0x04, 0x70, 0x8c, 0x02, 0x8a, 0x90, 0x65, 0xe8, 0xfc, 0x4b, + 0x51, 0x27, 0xc7, 0x83, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x50, + 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, + 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x47, 0x63, 0x4d, 0x41, 0x30, 0x47, 0x43, + 0x53, 0x71, 0x47, 0x53, 0x49, 0x62, 0x33, 0x44, 0x51, 0x45, 0x42, 0x41, + 0x51, 0x55, 0x41, 0x41, 0x34, 0x47, 0x4b, 0x41, 0x44, 0x43, 0x42, 0x68, + 0x67, 0x4b, 0x42, 0x67, 0x47, 0x73, 0x46, 0x59, 0x52, 0x4d, 0x42, 0x55, + 0x44, 0x49, 0x6d, 0x61, 0x34, 0x30, 0x62, 0x6e, 0x65, 0x50, 0x42, 0x4d, + 0x30, 0x4f, 0x56, 0x73, 0x68, 0x77, 0x66, 0x0a, 0x6d, 0x57, 0x73, 0x53, + 0x39, 0x7a, 0x48, 0x71, 0x58, 0x48, 0x6c, 0x73, 0x7a, 0x6f, 0x51, 0x71, + 0x44, 0x52, 0x65, 0x59, 0x34, 0x67, 0x66, 0x33, 0x70, 0x57, 0x70, 0x53, + 0x4e, 0x4c, 0x74, 0x35, 0x46, 0x46, 0x4e, 0x4d, 0x50, 0x32, 0x57, 0x6b, + 0x45, 0x44, 0x79, 0x46, 0x69, 0x47, 0x53, 0x73, 0x36, 0x4d, 0x37, 0x48, + 0x33, 0x66, 0x38, 0x6c, 0x59, 0x48, 0x2b, 0x6c, 0x78, 0x55, 0x33, 0x7a, + 0x0a, 0x38, 0x48, 0x63, 0x4e, 0x67, 0x65, 0x79, 0x46, 0x50, 0x30, 0x34, + 0x46, 0x62, 0x55, 
0x4d, 0x4b, 0x53, 0x73, 0x33, 0x43, 0x36, 0x33, 0x61, + 0x6c, 0x73, 0x6e, 0x61, 0x68, 0x6b, 0x34, 0x47, 0x4b, 0x75, 0x47, 0x45, + 0x43, 0x76, 0x37, 0x47, 0x70, 0x59, 0x57, 0x76, 0x66, 0x6e, 0x6e, 0x55, + 0x57, 0x70, 0x76, 0x4e, 0x6f, 0x49, 0x68, 0x65, 0x6b, 0x34, 0x71, 0x35, + 0x75, 0x76, 0x67, 0x52, 0x77, 0x0a, 0x6a, 0x41, 0x4b, 0x4b, 0x6b, 0x47, + 0x58, 0x6f, 0x2f, 0x45, 0x74, 0x52, 0x4a, 0x38, 0x65, 0x44, 0x41, 0x67, + 0x45, 0x6c, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, + 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, + 0x2d, 0x2d, 0x2d, + ]), + ppk: true, + privateMAC: '0bc5cc619df85b79dfd3ea25f0e59230cf671cd2', + _converted: true, + _macresult: true + }, + what: 'Unencrypted RSA key (PPK format)' + }, + { source: [ + 'PuTTY-User-Key-File-2: ssh-rsa', + 'Encryption: aes256-cbc', + 'Comment: rsa-key-20141119', + 'Public-Lines: 4', + 'AAAAB3NzaC1yc2EAAAABJQAAAIBrBWETAVAyJmuNG53jwTNDlbIcH5lrEvcx6lx5', + 'bM6EKg0XmOIH96VqUjS7eRRTTD9lpBA8hYhkrOjOx93/JWB/pcVN8/B3DYHshT9O', + 'BW1DCkrNwut2pbJ2oZOBirhhAr+xqWFr3551FqbzaCIXpOKubr4EcIwCipBl6PxL', + 'USfHgw==', + 'Private-Lines: 8', + '8O3NrBePR4+4RHHys8wrRKCmgx3Gsdz1cKoRJJDgnnrQxuAxBTVUlVTC2vzSOXrP', + 'jlKdRP9DbtrL5YF8g9HkMPpzzTdgpiEAGikpIc+L0sJhN+S9VvMoXRRKqyuB7o1C', + 'xZhAeRaZ68izdUUbFd7ajUwBNpGoFppOznGXyf/3/Ao9FfoTKReZzeBd/e2/JFhc', + 'nsYkSbtWfKQBVXF1Fhr10UwRWSMaVJSDkcSuk8ghICoKBBCgRBnZFap0SR77oIJh', + 'DKgmNFktoKzEqh111vYPhQyEEyGNxpD0aEPaGUJEjPEd3C5a46n7mIiqrNX7QJoo', + 'xxZtkueGdXWaoe5mBf1tFc+nCA1l72nUlghJZooQhnO9NPpieu6NNZ8X+tFQ1Rq/', + 'xvOZHzpDOOeOgWdV7oAmRDbDjYPh0H67z2OKCFaP0Z9kgmnwqV2IJvTDrexj1VwY', + '6kFaPldnK+ohXl37oVIlWA==', + 'Private-MAC: 9d09a15a122e48955682ba969d33c75ba8e4be2c' + ].join('\n'), + expected: { + fulltype: 'ssh-rsa', + type: 'rsa', + curve: undefined, + extra: undefined, + comment: 'rsa-key-20141119', + encryption: 'aes256-cbc', + private: new Buffer([ + 0xf0, 0xed, 0xcd, 0xac, 0x17, 0x8f, 0x47, 0x8f, 0xb8, 0x44, 0x71, 0xf2, + 0xb3, 0xcc, 0x2b, 
0x44, 0xa0, 0xa6, 0x83, 0x1d, 0xc6, 0xb1, 0xdc, 0xf5, + 0x70, 0xaa, 0x11, 0x24, 0x90, 0xe0, 0x9e, 0x7a, 0xd0, 0xc6, 0xe0, 0x31, + 0x05, 0x35, 0x54, 0x95, 0x54, 0xc2, 0xda, 0xfc, 0xd2, 0x39, 0x7a, 0xcf, + 0x8e, 0x52, 0x9d, 0x44, 0xff, 0x43, 0x6e, 0xda, 0xcb, 0xe5, 0x81, 0x7c, + 0x83, 0xd1, 0xe4, 0x30, 0xfa, 0x73, 0xcd, 0x37, 0x60, 0xa6, 0x21, 0x00, + 0x1a, 0x29, 0x29, 0x21, 0xcf, 0x8b, 0xd2, 0xc2, 0x61, 0x37, 0xe4, 0xbd, + 0x56, 0xf3, 0x28, 0x5d, 0x14, 0x4a, 0xab, 0x2b, 0x81, 0xee, 0x8d, 0x42, + 0xc5, 0x98, 0x40, 0x79, 0x16, 0x99, 0xeb, 0xc8, 0xb3, 0x75, 0x45, 0x1b, + 0x15, 0xde, 0xda, 0x8d, 0x4c, 0x01, 0x36, 0x91, 0xa8, 0x16, 0x9a, 0x4e, + 0xce, 0x71, 0x97, 0xc9, 0xff, 0xf7, 0xfc, 0x0a, 0x3d, 0x15, 0xfa, 0x13, + 0x29, 0x17, 0x99, 0xcd, 0xe0, 0x5d, 0xfd, 0xed, 0xbf, 0x24, 0x58, 0x5c, + 0x9e, 0xc6, 0x24, 0x49, 0xbb, 0x56, 0x7c, 0xa4, 0x01, 0x55, 0x71, 0x75, + 0x16, 0x1a, 0xf5, 0xd1, 0x4c, 0x11, 0x59, 0x23, 0x1a, 0x54, 0x94, 0x83, + 0x91, 0xc4, 0xae, 0x93, 0xc8, 0x21, 0x20, 0x2a, 0x0a, 0x04, 0x10, 0xa0, + 0x44, 0x19, 0xd9, 0x15, 0xaa, 0x74, 0x49, 0x1e, 0xfb, 0xa0, 0x82, 0x61, + 0x0c, 0xa8, 0x26, 0x34, 0x59, 0x2d, 0xa0, 0xac, 0xc4, 0xaa, 0x1d, 0x75, + 0xd6, 0xf6, 0x0f, 0x85, 0x0c, 0x84, 0x13, 0x21, 0x8d, 0xc6, 0x90, 0xf4, + 0x68, 0x43, 0xda, 0x19, 0x42, 0x44, 0x8c, 0xf1, 0x1d, 0xdc, 0x2e, 0x5a, + 0xe3, 0xa9, 0xfb, 0x98, 0x88, 0xaa, 0xac, 0xd5, 0xfb, 0x40, 0x9a, 0x28, + 0xc7, 0x16, 0x6d, 0x92, 0xe7, 0x86, 0x75, 0x75, 0x9a, 0xa1, 0xee, 0x66, + 0x05, 0xfd, 0x6d, 0x15, 0xcf, 0xa7, 0x08, 0x0d, 0x65, 0xef, 0x69, 0xd4, + 0x96, 0x08, 0x49, 0x66, 0x8a, 0x10, 0x86, 0x73, 0xbd, 0x34, 0xfa, 0x62, + 0x7a, 0xee, 0x8d, 0x35, 0x9f, 0x17, 0xfa, 0xd1, 0x50, 0xd5, 0x1a, 0xbf, + 0xc6, 0xf3, 0x99, 0x1f, 0x3a, 0x43, 0x38, 0xe7, 0x8e, 0x81, 0x67, 0x55, + 0xee, 0x80, 0x26, 0x44, 0x36, 0xc3, 0x8d, 0x83, 0xe1, 0xd0, 0x7e, 0xbb, + 0xcf, 0x63, 0x8a, 0x08, 0x56, 0x8f, 0xd1, 0x9f, 0x64, 0x82, 0x69, 0xf0, + 0xa9, 0x5d, 0x88, 0x26, 0xf4, 0xc3, 0xad, 0xec, 0x63, 0xd5, 0x5c, 0x18, + 0xea, 0x41, 0x5a, 
0x3e, 0x57, 0x67, 0x2b, 0xea, 0x21, 0x5e, 0x5d, 0xfb, + 0xa1, 0x52, 0x25, 0x58, + ]), + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x72, 0x73, 0x61, 0x00, + 0x00, 0x00, 0x01, 0x25, 0x00, 0x00, 0x00, 0x80, 0x6b, 0x05, 0x61, 0x13, + 0x01, 0x50, 0x32, 0x26, 0x6b, 0x8d, 0x1b, 0x9d, 0xe3, 0xc1, 0x33, 0x43, + 0x95, 0xb2, 0x1c, 0x1f, 0x99, 0x6b, 0x12, 0xf7, 0x31, 0xea, 0x5c, 0x79, + 0x6c, 0xce, 0x84, 0x2a, 0x0d, 0x17, 0x98, 0xe2, 0x07, 0xf7, 0xa5, 0x6a, + 0x52, 0x34, 0xbb, 0x79, 0x14, 0x53, 0x4c, 0x3f, 0x65, 0xa4, 0x10, 0x3c, + 0x85, 0x88, 0x64, 0xac, 0xe8, 0xce, 0xc7, 0xdd, 0xff, 0x25, 0x60, 0x7f, + 0xa5, 0xc5, 0x4d, 0xf3, 0xf0, 0x77, 0x0d, 0x81, 0xec, 0x85, 0x3f, 0x4e, + 0x05, 0x6d, 0x43, 0x0a, 0x4a, 0xcd, 0xc2, 0xeb, 0x76, 0xa5, 0xb2, 0x76, + 0xa1, 0x93, 0x81, 0x8a, 0xb8, 0x61, 0x02, 0xbf, 0xb1, 0xa9, 0x61, 0x6b, + 0xdf, 0x9e, 0x75, 0x16, 0xa6, 0xf3, 0x68, 0x22, 0x17, 0xa4, 0xe2, 0xae, + 0x6e, 0xbe, 0x04, 0x70, 0x8c, 0x02, 0x8a, 0x90, 0x65, 0xe8, 0xfc, 0x4b, + 0x51, 0x27, 0xc7, 0x83, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x50, + 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, + 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x47, 0x63, 0x4d, 0x41, 0x30, 0x47, 0x43, + 0x53, 0x71, 0x47, 0x53, 0x49, 0x62, 0x33, 0x44, 0x51, 0x45, 0x42, 0x41, + 0x51, 0x55, 0x41, 0x41, 0x34, 0x47, 0x4b, 0x41, 0x44, 0x43, 0x42, 0x68, + 0x67, 0x4b, 0x42, 0x67, 0x47, 0x73, 0x46, 0x59, 0x52, 0x4d, 0x42, 0x55, + 0x44, 0x49, 0x6d, 0x61, 0x34, 0x30, 0x62, 0x6e, 0x65, 0x50, 0x42, 0x4d, + 0x30, 0x4f, 0x56, 0x73, 0x68, 0x77, 0x66, 0x0a, 0x6d, 0x57, 0x73, 0x53, + 0x39, 0x7a, 0x48, 0x71, 0x58, 0x48, 0x6c, 0x73, 0x7a, 0x6f, 0x51, 0x71, + 0x44, 0x52, 0x65, 0x59, 0x34, 0x67, 0x66, 0x33, 0x70, 0x57, 0x70, 0x53, + 0x4e, 0x4c, 0x74, 0x35, 0x46, 0x46, 0x4e, 0x4d, 0x50, 0x32, 0x57, 0x6b, + 0x45, 0x44, 0x79, 0x46, 0x69, 0x47, 0x53, 0x73, 0x36, 0x4d, 0x37, 0x48, + 0x33, 0x66, 0x38, 0x6c, 
0x59, 0x48, 0x2b, 0x6c, 0x78, 0x55, 0x33, 0x7a, + 0x0a, 0x38, 0x48, 0x63, 0x4e, 0x67, 0x65, 0x79, 0x46, 0x50, 0x30, 0x34, + 0x46, 0x62, 0x55, 0x4d, 0x4b, 0x53, 0x73, 0x33, 0x43, 0x36, 0x33, 0x61, + 0x6c, 0x73, 0x6e, 0x61, 0x68, 0x6b, 0x34, 0x47, 0x4b, 0x75, 0x47, 0x45, + 0x43, 0x76, 0x37, 0x47, 0x70, 0x59, 0x57, 0x76, 0x66, 0x6e, 0x6e, 0x55, + 0x57, 0x70, 0x76, 0x4e, 0x6f, 0x49, 0x68, 0x65, 0x6b, 0x34, 0x71, 0x35, + 0x75, 0x76, 0x67, 0x52, 0x77, 0x0a, 0x6a, 0x41, 0x4b, 0x4b, 0x6b, 0x47, + 0x58, 0x6f, 0x2f, 0x45, 0x74, 0x52, 0x4a, 0x38, 0x65, 0x44, 0x41, 0x67, + 0x45, 0x6c, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, + 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, + 0x2d, 0x2d, 0x2d, + ]), + ppk: true, + privateMAC: '9d09a15a122e48955682ba969d33c75ba8e4be2c' + }, + what: 'Encrypted RSA key (PPK format)' + }, + { source: [ + 'PuTTY-User-Key-File-2: ssh-dss', + 'Encryption: none', + 'Comment: dsa-key-20141202', + 'Public-Lines: 10', + 'AAAAB3NzaC1kc3MAAACBAJn2I8YefRo3BsEeinQt8KQ4cEyArAs7Y/W733oRSYOI', + 'zWF1Ju124ysKrmg2okv+05CYcjV3Yp4AzQeomYAlgmB/7xCEnWaEnxCwAxmrrJMm', + 'PrkwNjHOIi7yM5QOE90IM/Q+IJA4EPBfSb+Xr8fYhrp53KNHVSnc2KkOqpo2FsIj', + 'AAAAFQC4NlP50GqyUqq2B82Vh/w5j3TzwQAAAIAeSGom9LLNdzcwCHnGfxKNnEz3', + '55KITADTxiIpBvnQW+eDHwQvIw6V2Oc73bKCu5ZirZmIMW5w6KjQVwkuQBoF9Koq', + '/2u6VeevtL9pD6TBzSLMVw5pV3PmE4/C/eLiaUxZLIHdbzpqPkAvAUBrXKkj0ijz', + 'cNzCp1fuF8H0pvR8yQAAAIAmvV+kqWhUgDYwNNz1qDaoS8XdsOponutZ/0stRQ66', + 'mKAy8kNVNNQ6oUx1XFl1WUt4iyFY/2Rz2fZhLz5/TbZRK5ygo666WgnxB/Ud4GAx', + '/BPQTghOJJOL00vJk+8jVCGNDc942V6nFXznDMXwqxhRCW6dm+2lTh7ntrli8mCk', + '5g==', + 'Private-Lines: 1', + 'AAAAFAiWSgDAM5Mq9r5dj3uPGnYvFbPb', + 'Private-MAC: c07e1d3ae080613e7cdb63ab06bde2e805bc420b' + ].join('\n'), + expected: { + fulltype: 'ssh-dss', + type: 'dss', + curve: undefined, + extra: undefined, + comment: 'dsa-key-20141202', + encryption: undefined, + private: new Buffer([ + 0x30, 0x82, 0x01, 0xba, 0x02, 0x01, 0x00, 0x02, 0x81, 0x81, 0x00, 
0x99, + 0xf6, 0x23, 0xc6, 0x1e, 0x7d, 0x1a, 0x37, 0x06, 0xc1, 0x1e, 0x8a, 0x74, + 0x2d, 0xf0, 0xa4, 0x38, 0x70, 0x4c, 0x80, 0xac, 0x0b, 0x3b, 0x63, 0xf5, + 0xbb, 0xdf, 0x7a, 0x11, 0x49, 0x83, 0x88, 0xcd, 0x61, 0x75, 0x26, 0xed, + 0x76, 0xe3, 0x2b, 0x0a, 0xae, 0x68, 0x36, 0xa2, 0x4b, 0xfe, 0xd3, 0x90, + 0x98, 0x72, 0x35, 0x77, 0x62, 0x9e, 0x00, 0xcd, 0x07, 0xa8, 0x99, 0x80, + 0x25, 0x82, 0x60, 0x7f, 0xef, 0x10, 0x84, 0x9d, 0x66, 0x84, 0x9f, 0x10, + 0xb0, 0x03, 0x19, 0xab, 0xac, 0x93, 0x26, 0x3e, 0xb9, 0x30, 0x36, 0x31, + 0xce, 0x22, 0x2e, 0xf2, 0x33, 0x94, 0x0e, 0x13, 0xdd, 0x08, 0x33, 0xf4, + 0x3e, 0x20, 0x90, 0x38, 0x10, 0xf0, 0x5f, 0x49, 0xbf, 0x97, 0xaf, 0xc7, + 0xd8, 0x86, 0xba, 0x79, 0xdc, 0xa3, 0x47, 0x55, 0x29, 0xdc, 0xd8, 0xa9, + 0x0e, 0xaa, 0x9a, 0x36, 0x16, 0xc2, 0x23, 0x02, 0x15, 0x00, 0xb8, 0x36, + 0x53, 0xf9, 0xd0, 0x6a, 0xb2, 0x52, 0xaa, 0xb6, 0x07, 0xcd, 0x95, 0x87, + 0xfc, 0x39, 0x8f, 0x74, 0xf3, 0xc1, 0x02, 0x81, 0x80, 0x1e, 0x48, 0x6a, + 0x26, 0xf4, 0xb2, 0xcd, 0x77, 0x37, 0x30, 0x08, 0x79, 0xc6, 0x7f, 0x12, + 0x8d, 0x9c, 0x4c, 0xf7, 0xe7, 0x92, 0x88, 0x4c, 0x00, 0xd3, 0xc6, 0x22, + 0x29, 0x06, 0xf9, 0xd0, 0x5b, 0xe7, 0x83, 0x1f, 0x04, 0x2f, 0x23, 0x0e, + 0x95, 0xd8, 0xe7, 0x3b, 0xdd, 0xb2, 0x82, 0xbb, 0x96, 0x62, 0xad, 0x99, + 0x88, 0x31, 0x6e, 0x70, 0xe8, 0xa8, 0xd0, 0x57, 0x09, 0x2e, 0x40, 0x1a, + 0x05, 0xf4, 0xaa, 0x2a, 0xff, 0x6b, 0xba, 0x55, 0xe7, 0xaf, 0xb4, 0xbf, + 0x69, 0x0f, 0xa4, 0xc1, 0xcd, 0x22, 0xcc, 0x57, 0x0e, 0x69, 0x57, 0x73, + 0xe6, 0x13, 0x8f, 0xc2, 0xfd, 0xe2, 0xe2, 0x69, 0x4c, 0x59, 0x2c, 0x81, + 0xdd, 0x6f, 0x3a, 0x6a, 0x3e, 0x40, 0x2f, 0x01, 0x40, 0x6b, 0x5c, 0xa9, + 0x23, 0xd2, 0x28, 0xf3, 0x70, 0xdc, 0xc2, 0xa7, 0x57, 0xee, 0x17, 0xc1, + 0xf4, 0xa6, 0xf4, 0x7c, 0xc9, 0x02, 0x81, 0x80, 0x26, 0xbd, 0x5f, 0xa4, + 0xa9, 0x68, 0x54, 0x80, 0x36, 0x30, 0x34, 0xdc, 0xf5, 0xa8, 0x36, 0xa8, + 0x4b, 0xc5, 0xdd, 0xb0, 0xea, 0x68, 0x9e, 0xeb, 0x59, 0xff, 0x4b, 0x2d, + 0x45, 0x0e, 0xba, 0x98, 0xa0, 0x32, 0xf2, 0x43, 0x55, 0x34, 0xd4, 
0x3a, + 0xa1, 0x4c, 0x75, 0x5c, 0x59, 0x75, 0x59, 0x4b, 0x78, 0x8b, 0x21, 0x58, + 0xff, 0x64, 0x73, 0xd9, 0xf6, 0x61, 0x2f, 0x3e, 0x7f, 0x4d, 0xb6, 0x51, + 0x2b, 0x9c, 0xa0, 0xa3, 0xae, 0xba, 0x5a, 0x09, 0xf1, 0x07, 0xf5, 0x1d, + 0xe0, 0x60, 0x31, 0xfc, 0x13, 0xd0, 0x4e, 0x08, 0x4e, 0x24, 0x93, 0x8b, + 0xd3, 0x4b, 0xc9, 0x93, 0xef, 0x23, 0x54, 0x21, 0x8d, 0x0d, 0xcf, 0x78, + 0xd9, 0x5e, 0xa7, 0x15, 0x7c, 0xe7, 0x0c, 0xc5, 0xf0, 0xab, 0x18, 0x51, + 0x09, 0x6e, 0x9d, 0x9b, 0xed, 0xa5, 0x4e, 0x1e, 0xe7, 0xb6, 0xb9, 0x62, + 0xf2, 0x60, 0xa4, 0xe6, 0x02, 0x14, 0x08, 0x96, 0x4a, 0x00, 0xc0, 0x33, + 0x93, 0x2a, 0xf6, 0xbe, 0x5d, 0x8f, 0x7b, 0x8f, 0x1a, 0x76, 0x2f, 0x15, + 0xb3, 0xdb, + ]), + privateOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x44, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x42, + 0x75, 0x67, 0x49, 0x42, 0x41, 0x41, 0x4b, 0x42, 0x67, 0x51, 0x43, 0x5a, + 0x39, 0x69, 0x50, 0x47, 0x48, 0x6e, 0x30, 0x61, 0x4e, 0x77, 0x62, 0x42, + 0x48, 0x6f, 0x70, 0x30, 0x4c, 0x66, 0x43, 0x6b, 0x4f, 0x48, 0x42, 0x4d, + 0x67, 0x4b, 0x77, 0x4c, 0x4f, 0x32, 0x50, 0x31, 0x75, 0x39, 0x39, 0x36, + 0x45, 0x55, 0x6d, 0x44, 0x69, 0x4d, 0x31, 0x68, 0x64, 0x53, 0x62, 0x74, + 0x0a, 0x64, 0x75, 0x4d, 0x72, 0x43, 0x71, 0x35, 0x6f, 0x4e, 0x71, 0x4a, + 0x4c, 0x2f, 0x74, 0x4f, 0x51, 0x6d, 0x48, 0x49, 0x31, 0x64, 0x32, 0x4b, + 0x65, 0x41, 0x4d, 0x30, 0x48, 0x71, 0x4a, 0x6d, 0x41, 0x4a, 0x59, 0x4a, + 0x67, 0x66, 0x2b, 0x38, 0x51, 0x68, 0x4a, 0x31, 0x6d, 0x68, 0x4a, 0x38, + 0x51, 0x73, 0x41, 0x4d, 0x5a, 0x71, 0x36, 0x79, 0x54, 0x4a, 0x6a, 0x36, + 0x35, 0x4d, 0x44, 0x59, 0x78, 0x0a, 0x7a, 0x69, 0x49, 0x75, 0x38, 0x6a, + 0x4f, 0x55, 0x44, 0x68, 0x50, 0x64, 0x43, 0x44, 0x50, 0x30, 0x50, 0x69, + 0x43, 0x51, 0x4f, 0x42, 0x44, 0x77, 0x58, 0x30, 0x6d, 0x2f, 0x6c, 0x36, + 0x2f, 0x48, 0x32, 0x49, 0x61, 0x36, 0x65, 0x64, 0x79, 0x6a, 0x52, 0x31, + 0x55, 0x70, 0x33, 
0x4e, 0x69, 0x70, 0x44, 0x71, 0x71, 0x61, 0x4e, 0x68, + 0x62, 0x43, 0x49, 0x77, 0x49, 0x56, 0x41, 0x4c, 0x67, 0x32, 0x0a, 0x55, + 0x2f, 0x6e, 0x51, 0x61, 0x72, 0x4a, 0x53, 0x71, 0x72, 0x59, 0x48, 0x7a, + 0x5a, 0x57, 0x48, 0x2f, 0x44, 0x6d, 0x50, 0x64, 0x50, 0x50, 0x42, 0x41, + 0x6f, 0x47, 0x41, 0x48, 0x6b, 0x68, 0x71, 0x4a, 0x76, 0x53, 0x79, 0x7a, + 0x58, 0x63, 0x33, 0x4d, 0x41, 0x68, 0x35, 0x78, 0x6e, 0x38, 0x53, 0x6a, + 0x5a, 0x78, 0x4d, 0x39, 0x2b, 0x65, 0x53, 0x69, 0x45, 0x77, 0x41, 0x30, + 0x38, 0x59, 0x69, 0x0a, 0x4b, 0x51, 0x62, 0x35, 0x30, 0x46, 0x76, 0x6e, + 0x67, 0x78, 0x38, 0x45, 0x4c, 0x79, 0x4d, 0x4f, 0x6c, 0x64, 0x6a, 0x6e, + 0x4f, 0x39, 0x32, 0x79, 0x67, 0x72, 0x75, 0x57, 0x59, 0x71, 0x32, 0x5a, + 0x69, 0x44, 0x46, 0x75, 0x63, 0x4f, 0x69, 0x6f, 0x30, 0x46, 0x63, 0x4a, + 0x4c, 0x6b, 0x41, 0x61, 0x42, 0x66, 0x53, 0x71, 0x4b, 0x76, 0x39, 0x72, + 0x75, 0x6c, 0x58, 0x6e, 0x72, 0x37, 0x53, 0x2f, 0x0a, 0x61, 0x51, 0x2b, + 0x6b, 0x77, 0x63, 0x30, 0x69, 0x7a, 0x46, 0x63, 0x4f, 0x61, 0x56, 0x64, + 0x7a, 0x35, 0x68, 0x4f, 0x50, 0x77, 0x76, 0x33, 0x69, 0x34, 0x6d, 0x6c, + 0x4d, 0x57, 0x53, 0x79, 0x42, 0x33, 0x57, 0x38, 0x36, 0x61, 0x6a, 0x35, + 0x41, 0x4c, 0x77, 0x46, 0x41, 0x61, 0x31, 0x79, 0x70, 0x49, 0x39, 0x49, + 0x6f, 0x38, 0x33, 0x44, 0x63, 0x77, 0x71, 0x64, 0x58, 0x37, 0x68, 0x66, + 0x42, 0x0a, 0x39, 0x4b, 0x62, 0x30, 0x66, 0x4d, 0x6b, 0x43, 0x67, 0x59, + 0x41, 0x6d, 0x76, 0x56, 0x2b, 0x6b, 0x71, 0x57, 0x68, 0x55, 0x67, 0x44, + 0x59, 0x77, 0x4e, 0x4e, 0x7a, 0x31, 0x71, 0x44, 0x61, 0x6f, 0x53, 0x38, + 0x58, 0x64, 0x73, 0x4f, 0x70, 0x6f, 0x6e, 0x75, 0x74, 0x5a, 0x2f, 0x30, + 0x73, 0x74, 0x52, 0x51, 0x36, 0x36, 0x6d, 0x4b, 0x41, 0x79, 0x38, 0x6b, + 0x4e, 0x56, 0x4e, 0x4e, 0x51, 0x36, 0x0a, 0x6f, 0x55, 0x78, 0x31, 0x58, + 0x46, 0x6c, 0x31, 0x57, 0x55, 0x74, 0x34, 0x69, 0x79, 0x46, 0x59, 0x2f, + 0x32, 0x52, 0x7a, 0x32, 0x66, 0x5a, 0x68, 0x4c, 0x7a, 0x35, 0x2f, 0x54, + 0x62, 0x5a, 0x52, 0x4b, 0x35, 0x79, 0x67, 0x6f, 0x36, 0x36, 0x36, 0x57, + 0x67, 0x6e, 0x78, 
0x42, 0x2f, 0x55, 0x64, 0x34, 0x47, 0x41, 0x78, 0x2f, + 0x42, 0x50, 0x51, 0x54, 0x67, 0x68, 0x4f, 0x4a, 0x4a, 0x4f, 0x4c, 0x0a, + 0x30, 0x30, 0x76, 0x4a, 0x6b, 0x2b, 0x38, 0x6a, 0x56, 0x43, 0x47, 0x4e, + 0x44, 0x63, 0x39, 0x34, 0x32, 0x56, 0x36, 0x6e, 0x46, 0x58, 0x7a, 0x6e, + 0x44, 0x4d, 0x58, 0x77, 0x71, 0x78, 0x68, 0x52, 0x43, 0x57, 0x36, 0x64, + 0x6d, 0x2b, 0x32, 0x6c, 0x54, 0x68, 0x37, 0x6e, 0x74, 0x72, 0x6c, 0x69, + 0x38, 0x6d, 0x43, 0x6b, 0x35, 0x67, 0x49, 0x55, 0x43, 0x4a, 0x5a, 0x4b, + 0x41, 0x4d, 0x41, 0x7a, 0x0a, 0x6b, 0x79, 0x72, 0x32, 0x76, 0x6c, 0x32, + 0x50, 0x65, 0x34, 0x38, 0x61, 0x64, 0x69, 0x38, 0x56, 0x73, 0x39, 0x73, + 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x44, + 0x53, 0x41, 0x20, 0x50, 0x52, 0x49, 0x56, 0x41, 0x54, 0x45, 0x20, 0x4b, + 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x00, + 0x00, 0x00, 0x81, 0x00, 0x99, 0xf6, 0x23, 0xc6, 0x1e, 0x7d, 0x1a, 0x37, + 0x06, 0xc1, 0x1e, 0x8a, 0x74, 0x2d, 0xf0, 0xa4, 0x38, 0x70, 0x4c, 0x80, + 0xac, 0x0b, 0x3b, 0x63, 0xf5, 0xbb, 0xdf, 0x7a, 0x11, 0x49, 0x83, 0x88, + 0xcd, 0x61, 0x75, 0x26, 0xed, 0x76, 0xe3, 0x2b, 0x0a, 0xae, 0x68, 0x36, + 0xa2, 0x4b, 0xfe, 0xd3, 0x90, 0x98, 0x72, 0x35, 0x77, 0x62, 0x9e, 0x00, + 0xcd, 0x07, 0xa8, 0x99, 0x80, 0x25, 0x82, 0x60, 0x7f, 0xef, 0x10, 0x84, + 0x9d, 0x66, 0x84, 0x9f, 0x10, 0xb0, 0x03, 0x19, 0xab, 0xac, 0x93, 0x26, + 0x3e, 0xb9, 0x30, 0x36, 0x31, 0xce, 0x22, 0x2e, 0xf2, 0x33, 0x94, 0x0e, + 0x13, 0xdd, 0x08, 0x33, 0xf4, 0x3e, 0x20, 0x90, 0x38, 0x10, 0xf0, 0x5f, + 0x49, 0xbf, 0x97, 0xaf, 0xc7, 0xd8, 0x86, 0xba, 0x79, 0xdc, 0xa3, 0x47, + 0x55, 0x29, 0xdc, 0xd8, 0xa9, 0x0e, 0xaa, 0x9a, 0x36, 0x16, 0xc2, 0x23, + 0x00, 0x00, 0x00, 0x15, 0x00, 0xb8, 0x36, 0x53, 0xf9, 0xd0, 0x6a, 0xb2, + 0x52, 0xaa, 0xb6, 0x07, 0xcd, 0x95, 0x87, 0xfc, 0x39, 0x8f, 0x74, 0xf3, + 0xc1, 0x00, 0x00, 0x00, 0x80, 0x1e, 0x48, 0x6a, 0x26, 0xf4, 0xb2, 0xcd, + 0x77, 0x37, 0x30, 
0x08, 0x79, 0xc6, 0x7f, 0x12, 0x8d, 0x9c, 0x4c, 0xf7, + 0xe7, 0x92, 0x88, 0x4c, 0x00, 0xd3, 0xc6, 0x22, 0x29, 0x06, 0xf9, 0xd0, + 0x5b, 0xe7, 0x83, 0x1f, 0x04, 0x2f, 0x23, 0x0e, 0x95, 0xd8, 0xe7, 0x3b, + 0xdd, 0xb2, 0x82, 0xbb, 0x96, 0x62, 0xad, 0x99, 0x88, 0x31, 0x6e, 0x70, + 0xe8, 0xa8, 0xd0, 0x57, 0x09, 0x2e, 0x40, 0x1a, 0x05, 0xf4, 0xaa, 0x2a, + 0xff, 0x6b, 0xba, 0x55, 0xe7, 0xaf, 0xb4, 0xbf, 0x69, 0x0f, 0xa4, 0xc1, + 0xcd, 0x22, 0xcc, 0x57, 0x0e, 0x69, 0x57, 0x73, 0xe6, 0x13, 0x8f, 0xc2, + 0xfd, 0xe2, 0xe2, 0x69, 0x4c, 0x59, 0x2c, 0x81, 0xdd, 0x6f, 0x3a, 0x6a, + 0x3e, 0x40, 0x2f, 0x01, 0x40, 0x6b, 0x5c, 0xa9, 0x23, 0xd2, 0x28, 0xf3, + 0x70, 0xdc, 0xc2, 0xa7, 0x57, 0xee, 0x17, 0xc1, 0xf4, 0xa6, 0xf4, 0x7c, + 0xc9, 0x00, 0x00, 0x00, 0x80, 0x26, 0xbd, 0x5f, 0xa4, 0xa9, 0x68, 0x54, + 0x80, 0x36, 0x30, 0x34, 0xdc, 0xf5, 0xa8, 0x36, 0xa8, 0x4b, 0xc5, 0xdd, + 0xb0, 0xea, 0x68, 0x9e, 0xeb, 0x59, 0xff, 0x4b, 0x2d, 0x45, 0x0e, 0xba, + 0x98, 0xa0, 0x32, 0xf2, 0x43, 0x55, 0x34, 0xd4, 0x3a, 0xa1, 0x4c, 0x75, + 0x5c, 0x59, 0x75, 0x59, 0x4b, 0x78, 0x8b, 0x21, 0x58, 0xff, 0x64, 0x73, + 0xd9, 0xf6, 0x61, 0x2f, 0x3e, 0x7f, 0x4d, 0xb6, 0x51, 0x2b, 0x9c, 0xa0, + 0xa3, 0xae, 0xba, 0x5a, 0x09, 0xf1, 0x07, 0xf5, 0x1d, 0xe0, 0x60, 0x31, + 0xfc, 0x13, 0xd0, 0x4e, 0x08, 0x4e, 0x24, 0x93, 0x8b, 0xd3, 0x4b, 0xc9, + 0x93, 0xef, 0x23, 0x54, 0x21, 0x8d, 0x0d, 0xcf, 0x78, 0xd9, 0x5e, 0xa7, + 0x15, 0x7c, 0xe7, 0x0c, 0xc5, 0xf0, 0xab, 0x18, 0x51, 0x09, 0x6e, 0x9d, + 0x9b, 0xed, 0xa5, 0x4e, 0x1e, 0xe7, 0xb6, 0xb9, 0x62, 0xf2, 0x60, 0xa4, + 0xe6, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x50, + 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, + 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x42, 0x74, 0x6a, 0x43, 0x43, 0x41, + 0x53, 0x73, 0x47, 0x42, 0x79, 0x71, 0x47, 0x53, 0x4d, 0x34, 0x34, 0x42, + 0x41, 0x45, 0x77, 0x67, 0x67, 0x45, 0x65, 0x41, 0x6f, 0x47, 0x42, 0x41, + 0x4a, 0x6e, 0x32, 0x49, 0x38, 0x59, 0x65, 0x66, 
0x52, 0x6f, 0x33, 0x42, + 0x73, 0x45, 0x65, 0x69, 0x6e, 0x51, 0x74, 0x38, 0x4b, 0x51, 0x34, 0x63, + 0x45, 0x79, 0x41, 0x72, 0x41, 0x73, 0x37, 0x0a, 0x59, 0x2f, 0x57, 0x37, + 0x33, 0x33, 0x6f, 0x52, 0x53, 0x59, 0x4f, 0x49, 0x7a, 0x57, 0x46, 0x31, + 0x4a, 0x75, 0x31, 0x32, 0x34, 0x79, 0x73, 0x4b, 0x72, 0x6d, 0x67, 0x32, + 0x6f, 0x6b, 0x76, 0x2b, 0x30, 0x35, 0x43, 0x59, 0x63, 0x6a, 0x56, 0x33, + 0x59, 0x70, 0x34, 0x41, 0x7a, 0x51, 0x65, 0x6f, 0x6d, 0x59, 0x41, 0x6c, + 0x67, 0x6d, 0x42, 0x2f, 0x37, 0x78, 0x43, 0x45, 0x6e, 0x57, 0x61, 0x45, + 0x0a, 0x6e, 0x78, 0x43, 0x77, 0x41, 0x78, 0x6d, 0x72, 0x72, 0x4a, 0x4d, + 0x6d, 0x50, 0x72, 0x6b, 0x77, 0x4e, 0x6a, 0x48, 0x4f, 0x49, 0x69, 0x37, + 0x79, 0x4d, 0x35, 0x51, 0x4f, 0x45, 0x39, 0x30, 0x49, 0x4d, 0x2f, 0x51, + 0x2b, 0x49, 0x4a, 0x41, 0x34, 0x45, 0x50, 0x42, 0x66, 0x53, 0x62, 0x2b, + 0x58, 0x72, 0x38, 0x66, 0x59, 0x68, 0x72, 0x70, 0x35, 0x33, 0x4b, 0x4e, + 0x48, 0x56, 0x53, 0x6e, 0x63, 0x0a, 0x32, 0x4b, 0x6b, 0x4f, 0x71, 0x70, + 0x6f, 0x32, 0x46, 0x73, 0x49, 0x6a, 0x41, 0x68, 0x55, 0x41, 0x75, 0x44, + 0x5a, 0x54, 0x2b, 0x64, 0x42, 0x71, 0x73, 0x6c, 0x4b, 0x71, 0x74, 0x67, + 0x66, 0x4e, 0x6c, 0x59, 0x66, 0x38, 0x4f, 0x59, 0x39, 0x30, 0x38, 0x38, + 0x45, 0x43, 0x67, 0x59, 0x41, 0x65, 0x53, 0x47, 0x6f, 0x6d, 0x39, 0x4c, + 0x4c, 0x4e, 0x64, 0x7a, 0x63, 0x77, 0x43, 0x48, 0x6e, 0x47, 0x0a, 0x66, + 0x78, 0x4b, 0x4e, 0x6e, 0x45, 0x7a, 0x33, 0x35, 0x35, 0x4b, 0x49, 0x54, + 0x41, 0x44, 0x54, 0x78, 0x69, 0x49, 0x70, 0x42, 0x76, 0x6e, 0x51, 0x57, + 0x2b, 0x65, 0x44, 0x48, 0x77, 0x51, 0x76, 0x49, 0x77, 0x36, 0x56, 0x32, + 0x4f, 0x63, 0x37, 0x33, 0x62, 0x4b, 0x43, 0x75, 0x35, 0x5a, 0x69, 0x72, + 0x5a, 0x6d, 0x49, 0x4d, 0x57, 0x35, 0x77, 0x36, 0x4b, 0x6a, 0x51, 0x56, + 0x77, 0x6b, 0x75, 0x0a, 0x51, 0x42, 0x6f, 0x46, 0x39, 0x4b, 0x6f, 0x71, + 0x2f, 0x32, 0x75, 0x36, 0x56, 0x65, 0x65, 0x76, 0x74, 0x4c, 0x39, 0x70, + 0x44, 0x36, 0x54, 0x42, 0x7a, 0x53, 0x4c, 0x4d, 0x56, 0x77, 0x35, 0x70, + 0x56, 0x33, 0x50, 0x6d, 0x45, 0x34, 0x2f, 0x43, 
0x2f, 0x65, 0x4c, 0x69, + 0x61, 0x55, 0x78, 0x5a, 0x4c, 0x49, 0x48, 0x64, 0x62, 0x7a, 0x70, 0x71, + 0x50, 0x6b, 0x41, 0x76, 0x41, 0x55, 0x42, 0x72, 0x0a, 0x58, 0x4b, 0x6b, + 0x6a, 0x30, 0x69, 0x6a, 0x7a, 0x63, 0x4e, 0x7a, 0x43, 0x70, 0x31, 0x66, + 0x75, 0x46, 0x38, 0x48, 0x30, 0x70, 0x76, 0x52, 0x38, 0x79, 0x51, 0x4f, + 0x42, 0x68, 0x41, 0x41, 0x43, 0x67, 0x59, 0x41, 0x6d, 0x76, 0x56, 0x2b, + 0x6b, 0x71, 0x57, 0x68, 0x55, 0x67, 0x44, 0x59, 0x77, 0x4e, 0x4e, 0x7a, + 0x31, 0x71, 0x44, 0x61, 0x6f, 0x53, 0x38, 0x58, 0x64, 0x73, 0x4f, 0x70, + 0x6f, 0x0a, 0x6e, 0x75, 0x74, 0x5a, 0x2f, 0x30, 0x73, 0x74, 0x52, 0x51, + 0x36, 0x36, 0x6d, 0x4b, 0x41, 0x79, 0x38, 0x6b, 0x4e, 0x56, 0x4e, 0x4e, + 0x51, 0x36, 0x6f, 0x55, 0x78, 0x31, 0x58, 0x46, 0x6c, 0x31, 0x57, 0x55, + 0x74, 0x34, 0x69, 0x79, 0x46, 0x59, 0x2f, 0x32, 0x52, 0x7a, 0x32, 0x66, + 0x5a, 0x68, 0x4c, 0x7a, 0x35, 0x2f, 0x54, 0x62, 0x5a, 0x52, 0x4b, 0x35, + 0x79, 0x67, 0x6f, 0x36, 0x36, 0x36, 0x0a, 0x57, 0x67, 0x6e, 0x78, 0x42, + 0x2f, 0x55, 0x64, 0x34, 0x47, 0x41, 0x78, 0x2f, 0x42, 0x50, 0x51, 0x54, + 0x67, 0x68, 0x4f, 0x4a, 0x4a, 0x4f, 0x4c, 0x30, 0x30, 0x76, 0x4a, 0x6b, + 0x2b, 0x38, 0x6a, 0x56, 0x43, 0x47, 0x4e, 0x44, 0x63, 0x39, 0x34, 0x32, + 0x56, 0x36, 0x6e, 0x46, 0x58, 0x7a, 0x6e, 0x44, 0x4d, 0x58, 0x77, 0x71, + 0x78, 0x68, 0x52, 0x43, 0x57, 0x36, 0x64, 0x6d, 0x2b, 0x32, 0x6c, 0x0a, + 0x54, 0x68, 0x37, 0x6e, 0x74, 0x72, 0x6c, 0x69, 0x38, 0x6d, 0x43, 0x6b, + 0x35, 0x67, 0x3d, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, + 0x44, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + ppk: true, + privateMAC: 'c07e1d3ae080613e7cdb63ab06bde2e805bc420b', + _converted: true, + _macresult: true + }, + what: 'Unencrypted DSA key (PPK format)' + }, + { source: [ + 'PuTTY-User-Key-File-2: ssh-dss', + 'Encryption: aes256-cbc', + 'Comment: dsa-key-20141202', + 'Public-Lines: 10', + 'AAAAB3NzaC1kc3MAAACBAJn2I8YefRo3BsEeinQt8KQ4cEyArAs7Y/W733oRSYOI', + 
'zWF1Ju124ysKrmg2okv+05CYcjV3Yp4AzQeomYAlgmB/7xCEnWaEnxCwAxmrrJMm', + 'PrkwNjHOIi7yM5QOE90IM/Q+IJA4EPBfSb+Xr8fYhrp53KNHVSnc2KkOqpo2FsIj', + 'AAAAFQC4NlP50GqyUqq2B82Vh/w5j3TzwQAAAIAeSGom9LLNdzcwCHnGfxKNnEz3', + '55KITADTxiIpBvnQW+eDHwQvIw6V2Oc73bKCu5ZirZmIMW5w6KjQVwkuQBoF9Koq', + '/2u6VeevtL9pD6TBzSLMVw5pV3PmE4/C/eLiaUxZLIHdbzpqPkAvAUBrXKkj0ijz', + 'cNzCp1fuF8H0pvR8yQAAAIAmvV+kqWhUgDYwNNz1qDaoS8XdsOponutZ/0stRQ66', + 'mKAy8kNVNNQ6oUx1XFl1WUt4iyFY/2Rz2fZhLz5/TbZRK5ygo666WgnxB/Ud4GAx', + '/BPQTghOJJOL00vJk+8jVCGNDc942V6nFXznDMXwqxhRCW6dm+2lTh7ntrli8mCk', + '5g==', + 'Private-Lines: 1', + 'BytvbK+jNyMjiVxCO5lcE4YbW7q293oC+LZjkZ8Ajlw=', + 'Private-MAC: c3da536ea28851fc32d5d1ff01498c8fcebc1170' + ].join('\n'), + expected: { + fulltype: 'ssh-dss', + type: 'dss', + curve: undefined, + extra: undefined, + comment: 'dsa-key-20141202', + encryption: 'aes256-cbc', + private: new Buffer([ + 0x07, 0x2b, 0x6f, 0x6c, 0xaf, 0xa3, 0x37, 0x23, 0x23, 0x89, 0x5c, 0x42, + 0x3b, 0x99, 0x5c, 0x13, 0x86, 0x1b, 0x5b, 0xba, 0xb6, 0xf7, 0x7a, 0x02, + 0xf8, 0xb6, 0x63, 0x91, 0x9f, 0x00, 0x8e, 0x5c, + ]), + privateOrig: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, 0x00, + 0x00, 0x00, 0x81, 0x00, 0x99, 0xf6, 0x23, 0xc6, 0x1e, 0x7d, 0x1a, 0x37, + 0x06, 0xc1, 0x1e, 0x8a, 0x74, 0x2d, 0xf0, 0xa4, 0x38, 0x70, 0x4c, 0x80, + 0xac, 0x0b, 0x3b, 0x63, 0xf5, 0xbb, 0xdf, 0x7a, 0x11, 0x49, 0x83, 0x88, + 0xcd, 0x61, 0x75, 0x26, 0xed, 0x76, 0xe3, 0x2b, 0x0a, 0xae, 0x68, 0x36, + 0xa2, 0x4b, 0xfe, 0xd3, 0x90, 0x98, 0x72, 0x35, 0x77, 0x62, 0x9e, 0x00, + 0xcd, 0x07, 0xa8, 0x99, 0x80, 0x25, 0x82, 0x60, 0x7f, 0xef, 0x10, 0x84, + 0x9d, 0x66, 0x84, 0x9f, 0x10, 0xb0, 0x03, 0x19, 0xab, 0xac, 0x93, 0x26, + 0x3e, 0xb9, 0x30, 0x36, 0x31, 0xce, 0x22, 0x2e, 0xf2, 0x33, 0x94, 0x0e, + 0x13, 0xdd, 0x08, 0x33, 0xf4, 0x3e, 0x20, 0x90, 0x38, 0x10, 0xf0, 0x5f, + 0x49, 0xbf, 0x97, 0xaf, 0xc7, 0xd8, 0x86, 0xba, 0x79, 0xdc, 0xa3, 0x47, + 0x55, 0x29, 0xdc, 0xd8, 0xa9, 0x0e, 0xaa, 
0x9a, 0x36, 0x16, 0xc2, 0x23, + 0x00, 0x00, 0x00, 0x15, 0x00, 0xb8, 0x36, 0x53, 0xf9, 0xd0, 0x6a, 0xb2, + 0x52, 0xaa, 0xb6, 0x07, 0xcd, 0x95, 0x87, 0xfc, 0x39, 0x8f, 0x74, 0xf3, + 0xc1, 0x00, 0x00, 0x00, 0x80, 0x1e, 0x48, 0x6a, 0x26, 0xf4, 0xb2, 0xcd, + 0x77, 0x37, 0x30, 0x08, 0x79, 0xc6, 0x7f, 0x12, 0x8d, 0x9c, 0x4c, 0xf7, + 0xe7, 0x92, 0x88, 0x4c, 0x00, 0xd3, 0xc6, 0x22, 0x29, 0x06, 0xf9, 0xd0, + 0x5b, 0xe7, 0x83, 0x1f, 0x04, 0x2f, 0x23, 0x0e, 0x95, 0xd8, 0xe7, 0x3b, + 0xdd, 0xb2, 0x82, 0xbb, 0x96, 0x62, 0xad, 0x99, 0x88, 0x31, 0x6e, 0x70, + 0xe8, 0xa8, 0xd0, 0x57, 0x09, 0x2e, 0x40, 0x1a, 0x05, 0xf4, 0xaa, 0x2a, + 0xff, 0x6b, 0xba, 0x55, 0xe7, 0xaf, 0xb4, 0xbf, 0x69, 0x0f, 0xa4, 0xc1, + 0xcd, 0x22, 0xcc, 0x57, 0x0e, 0x69, 0x57, 0x73, 0xe6, 0x13, 0x8f, 0xc2, + 0xfd, 0xe2, 0xe2, 0x69, 0x4c, 0x59, 0x2c, 0x81, 0xdd, 0x6f, 0x3a, 0x6a, + 0x3e, 0x40, 0x2f, 0x01, 0x40, 0x6b, 0x5c, 0xa9, 0x23, 0xd2, 0x28, 0xf3, + 0x70, 0xdc, 0xc2, 0xa7, 0x57, 0xee, 0x17, 0xc1, 0xf4, 0xa6, 0xf4, 0x7c, + 0xc9, 0x00, 0x00, 0x00, 0x80, 0x26, 0xbd, 0x5f, 0xa4, 0xa9, 0x68, 0x54, + 0x80, 0x36, 0x30, 0x34, 0xdc, 0xf5, 0xa8, 0x36, 0xa8, 0x4b, 0xc5, 0xdd, + 0xb0, 0xea, 0x68, 0x9e, 0xeb, 0x59, 0xff, 0x4b, 0x2d, 0x45, 0x0e, 0xba, + 0x98, 0xa0, 0x32, 0xf2, 0x43, 0x55, 0x34, 0xd4, 0x3a, 0xa1, 0x4c, 0x75, + 0x5c, 0x59, 0x75, 0x59, 0x4b, 0x78, 0x8b, 0x21, 0x58, 0xff, 0x64, 0x73, + 0xd9, 0xf6, 0x61, 0x2f, 0x3e, 0x7f, 0x4d, 0xb6, 0x51, 0x2b, 0x9c, 0xa0, + 0xa3, 0xae, 0xba, 0x5a, 0x09, 0xf1, 0x07, 0xf5, 0x1d, 0xe0, 0x60, 0x31, + 0xfc, 0x13, 0xd0, 0x4e, 0x08, 0x4e, 0x24, 0x93, 0x8b, 0xd3, 0x4b, 0xc9, + 0x93, 0xef, 0x23, 0x54, 0x21, 0x8d, 0x0d, 0xcf, 0x78, 0xd9, 0x5e, 0xa7, + 0x15, 0x7c, 0xe7, 0x0c, 0xc5, 0xf0, 0xab, 0x18, 0x51, 0x09, 0x6e, 0x9d, + 0x9b, 0xed, 0xa5, 0x4e, 0x1e, 0xe7, 0xb6, 0xb9, 0x62, 0xf2, 0x60, 0xa4, + 0xe6, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x50, + 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, + 
0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x42, 0x74, 0x6a, 0x43, 0x43, 0x41, + 0x53, 0x73, 0x47, 0x42, 0x79, 0x71, 0x47, 0x53, 0x4d, 0x34, 0x34, 0x42, + 0x41, 0x45, 0x77, 0x67, 0x67, 0x45, 0x65, 0x41, 0x6f, 0x47, 0x42, 0x41, + 0x4a, 0x6e, 0x32, 0x49, 0x38, 0x59, 0x65, 0x66, 0x52, 0x6f, 0x33, 0x42, + 0x73, 0x45, 0x65, 0x69, 0x6e, 0x51, 0x74, 0x38, 0x4b, 0x51, 0x34, 0x63, + 0x45, 0x79, 0x41, 0x72, 0x41, 0x73, 0x37, 0x0a, 0x59, 0x2f, 0x57, 0x37, + 0x33, 0x33, 0x6f, 0x52, 0x53, 0x59, 0x4f, 0x49, 0x7a, 0x57, 0x46, 0x31, + 0x4a, 0x75, 0x31, 0x32, 0x34, 0x79, 0x73, 0x4b, 0x72, 0x6d, 0x67, 0x32, + 0x6f, 0x6b, 0x76, 0x2b, 0x30, 0x35, 0x43, 0x59, 0x63, 0x6a, 0x56, 0x33, + 0x59, 0x70, 0x34, 0x41, 0x7a, 0x51, 0x65, 0x6f, 0x6d, 0x59, 0x41, 0x6c, + 0x67, 0x6d, 0x42, 0x2f, 0x37, 0x78, 0x43, 0x45, 0x6e, 0x57, 0x61, 0x45, + 0x0a, 0x6e, 0x78, 0x43, 0x77, 0x41, 0x78, 0x6d, 0x72, 0x72, 0x4a, 0x4d, + 0x6d, 0x50, 0x72, 0x6b, 0x77, 0x4e, 0x6a, 0x48, 0x4f, 0x49, 0x69, 0x37, + 0x79, 0x4d, 0x35, 0x51, 0x4f, 0x45, 0x39, 0x30, 0x49, 0x4d, 0x2f, 0x51, + 0x2b, 0x49, 0x4a, 0x41, 0x34, 0x45, 0x50, 0x42, 0x66, 0x53, 0x62, 0x2b, + 0x58, 0x72, 0x38, 0x66, 0x59, 0x68, 0x72, 0x70, 0x35, 0x33, 0x4b, 0x4e, + 0x48, 0x56, 0x53, 0x6e, 0x63, 0x0a, 0x32, 0x4b, 0x6b, 0x4f, 0x71, 0x70, + 0x6f, 0x32, 0x46, 0x73, 0x49, 0x6a, 0x41, 0x68, 0x55, 0x41, 0x75, 0x44, + 0x5a, 0x54, 0x2b, 0x64, 0x42, 0x71, 0x73, 0x6c, 0x4b, 0x71, 0x74, 0x67, + 0x66, 0x4e, 0x6c, 0x59, 0x66, 0x38, 0x4f, 0x59, 0x39, 0x30, 0x38, 0x38, + 0x45, 0x43, 0x67, 0x59, 0x41, 0x65, 0x53, 0x47, 0x6f, 0x6d, 0x39, 0x4c, + 0x4c, 0x4e, 0x64, 0x7a, 0x63, 0x77, 0x43, 0x48, 0x6e, 0x47, 0x0a, 0x66, + 0x78, 0x4b, 0x4e, 0x6e, 0x45, 0x7a, 0x33, 0x35, 0x35, 0x4b, 0x49, 0x54, + 0x41, 0x44, 0x54, 0x78, 0x69, 0x49, 0x70, 0x42, 0x76, 0x6e, 0x51, 0x57, + 0x2b, 0x65, 0x44, 0x48, 0x77, 0x51, 0x76, 0x49, 0x77, 0x36, 0x56, 0x32, + 0x4f, 0x63, 0x37, 0x33, 0x62, 0x4b, 0x43, 0x75, 0x35, 0x5a, 0x69, 0x72, + 0x5a, 0x6d, 0x49, 0x4d, 0x57, 0x35, 0x77, 0x36, 0x4b, 0x6a, 0x51, 0x56, + 
0x77, 0x6b, 0x75, 0x0a, 0x51, 0x42, 0x6f, 0x46, 0x39, 0x4b, 0x6f, 0x71, + 0x2f, 0x32, 0x75, 0x36, 0x56, 0x65, 0x65, 0x76, 0x74, 0x4c, 0x39, 0x70, + 0x44, 0x36, 0x54, 0x42, 0x7a, 0x53, 0x4c, 0x4d, 0x56, 0x77, 0x35, 0x70, + 0x56, 0x33, 0x50, 0x6d, 0x45, 0x34, 0x2f, 0x43, 0x2f, 0x65, 0x4c, 0x69, + 0x61, 0x55, 0x78, 0x5a, 0x4c, 0x49, 0x48, 0x64, 0x62, 0x7a, 0x70, 0x71, + 0x50, 0x6b, 0x41, 0x76, 0x41, 0x55, 0x42, 0x72, 0x0a, 0x58, 0x4b, 0x6b, + 0x6a, 0x30, 0x69, 0x6a, 0x7a, 0x63, 0x4e, 0x7a, 0x43, 0x70, 0x31, 0x66, + 0x75, 0x46, 0x38, 0x48, 0x30, 0x70, 0x76, 0x52, 0x38, 0x79, 0x51, 0x4f, + 0x42, 0x68, 0x41, 0x41, 0x43, 0x67, 0x59, 0x41, 0x6d, 0x76, 0x56, 0x2b, + 0x6b, 0x71, 0x57, 0x68, 0x55, 0x67, 0x44, 0x59, 0x77, 0x4e, 0x4e, 0x7a, + 0x31, 0x71, 0x44, 0x61, 0x6f, 0x53, 0x38, 0x58, 0x64, 0x73, 0x4f, 0x70, + 0x6f, 0x0a, 0x6e, 0x75, 0x74, 0x5a, 0x2f, 0x30, 0x73, 0x74, 0x52, 0x51, + 0x36, 0x36, 0x6d, 0x4b, 0x41, 0x79, 0x38, 0x6b, 0x4e, 0x56, 0x4e, 0x4e, + 0x51, 0x36, 0x6f, 0x55, 0x78, 0x31, 0x58, 0x46, 0x6c, 0x31, 0x57, 0x55, + 0x74, 0x34, 0x69, 0x79, 0x46, 0x59, 0x2f, 0x32, 0x52, 0x7a, 0x32, 0x66, + 0x5a, 0x68, 0x4c, 0x7a, 0x35, 0x2f, 0x54, 0x62, 0x5a, 0x52, 0x4b, 0x35, + 0x79, 0x67, 0x6f, 0x36, 0x36, 0x36, 0x0a, 0x57, 0x67, 0x6e, 0x78, 0x42, + 0x2f, 0x55, 0x64, 0x34, 0x47, 0x41, 0x78, 0x2f, 0x42, 0x50, 0x51, 0x54, + 0x67, 0x68, 0x4f, 0x4a, 0x4a, 0x4f, 0x4c, 0x30, 0x30, 0x76, 0x4a, 0x6b, + 0x2b, 0x38, 0x6a, 0x56, 0x43, 0x47, 0x4e, 0x44, 0x63, 0x39, 0x34, 0x32, + 0x56, 0x36, 0x6e, 0x46, 0x58, 0x7a, 0x6e, 0x44, 0x4d, 0x58, 0x77, 0x71, + 0x78, 0x68, 0x52, 0x43, 0x57, 0x36, 0x64, 0x6d, 0x2b, 0x32, 0x6c, 0x0a, + 0x54, 0x68, 0x37, 0x6e, 0x74, 0x72, 0x6c, 0x69, 0x38, 0x6d, 0x43, 0x6b, + 0x35, 0x67, 0x3d, 0x3d, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x45, 0x4e, + 0x44, 0x20, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, + ]), + ppk: true, + privateMAC: 'c3da536ea28851fc32d5d1ff01498c8fcebc1170' + }, + what: 'Encrypted DSA key (PPK 
format)' + }, +].forEach(function(v) { + require('buffer').INSPECT_MAX_BYTES = Infinity; + var result; + try { + result = parser(v.source); + } catch (ex) { + console.log('Exception encountered for: ' + v.what + '\n' + ex.stack); + process.exit(1); + } + /*if (result.private) { + for (var i = 0; i < result.private.length; ++i) { + if (i % 12 === 0) + process.stdout.write('\n'); + process.stdout.write('0x' + (result.private[i] < 16 ? '0' : '') + result.private[i].toString(16) + ', '); + } + } + console.log('');*/ + assert.deepEqual(result, + v.expected, + '[' + group + v.what + ']: parser output mismatch.' + + '\nSaw: ' + inspect(result) + + '\nExpected: ' + inspect(v.expected)); +}); diff --git a/reverse_engineering/node_modules/ssh2-streams/test/test-packet60.js b/reverse_engineering/node_modules/ssh2-streams/test/test-packet60.js new file mode 100644 index 0000000..eba9b79 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test-packet60.js @@ -0,0 +1,256 @@ +var SSH2Stream = require('../lib/ssh'); +var parseKey = require('../lib/utils').parseKey; +var genPubKey = require('../lib/utils').genPublicKey; + +var assert = require('assert'); +var crypto = require('crypto'); +var fs = require('fs'); + +var t = -1; +var SERVER_PRV_KEY = fs.readFileSync(__dirname + '/fixtures/ssh_host_rsa_key'); +var PARSED_SERVER_PRV_KEY = parseKey(SERVER_PRV_KEY); +var PARSED_SERVER_PUB_KEY = genPubKey(PARSED_SERVER_PRV_KEY); +var CLIENT_PRV_KEY = fs.readFileSync(__dirname + '/fixtures/id_rsa'); +var PARSED_CLIENT_PRV_KEY = parseKey(CLIENT_PRV_KEY); +var PARSED_CLIENT_PUB_KEY = genPubKey(PARSED_CLIENT_PRV_KEY); + +function makePair(cb) { + var server = new SSH2Stream({ + server: true, + hostKeys: { + 'ssh-rsa': { + privateKey: PARSED_SERVER_PRV_KEY, + publicKey: PARSED_SERVER_PUB_KEY + } + } + }); + var client = new SSH2Stream(); + + var done = []; + function tryDone(who) { + done.push(who); + if (done.length !== 2) + return; + cb(server, client); + } + + 
server.on('NEWKEYS', function () { tryDone('server'); }); + client.on('NEWKEYS', function () { tryDone('client'); }); + server.pipe(client).pipe(server); +} + +function signWithClientKey(blob, syncCb) { + var signType = (PARSED_CLIENT_PRV_KEY.type === 'rsa' ? 'R' : 'D') + 'SA-SHA1'; + var signature = crypto.createSign(signType); + signature.update(blob); + signature = signature.sign(PARSED_CLIENT_PRV_KEY.privateOrig); + syncCb(signature); +} + +function bufferEqual(a, b) { + if (a.length !== b.length) + return false; + for (var i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} + +function publickey(server, client) { + server.on('USERAUTH_REQUEST', function(user, service, method, data) { + assert.equal(user, 'bob'); + assert.equal(service, 'ssh-connection'); + assert.equal(method, 'publickey'); + assert.equal(data.keyAlgo, PARSED_CLIENT_PUB_KEY.fulltype); + assert.equal(true, bufferEqual(data.key, PARSED_CLIENT_PUB_KEY.public)); + assert.equal(data.signature, undefined); + assert.equal(data.blob, undefined); + return server.authPKOK(data.keyAlgo, data.key); + }); + client.on('USERAUTH_PK_OK', function() { + next(); + }).authPK('bob', PARSED_CLIENT_PUB_KEY); +} + +function keyboardInteractive(server, client) { + var infoReqsRxed = 0; + + server.on('USERAUTH_REQUEST', function(user, service, method, data) { + assert.equal(user, 'bob'); + assert.equal(service, 'ssh-connection'); + assert.equal(method, 'keyboard-interactive'); + assert.equal(data, ''); + process.nextTick(function() { + server.authInfoReq('req 0', 'instructions', [ + { prompt: 'Say something to req 0', echo: true } + ]); + }); + }).on('USERAUTH_INFO_RESPONSE', function(responses) { + if (infoReqsRxed === 1) { + assert.equal(responses.length, 1); + assert.equal(responses[0], 'hello to req 0'); + process.nextTick(function() { + server.authInfoReq('req 1', 'new instructions', [ + { prompt: 'Say something to req 1', echo: true }, + { prompt: 'Say something else', echo: 
false } + ]); + }); + } else if (infoReqsRxed === 2) { + assert.equal(responses.length, 2); + assert.equal(responses[0], 'hello to req 1'); + assert.equal(responses[1], 'something else'); + next(); + } else { + throw new Error('Received too many info reqs: ' + infoReqsRxed); + } + }); + + client.on('USERAUTH_INFO_REQUEST', function (name, inst, lang, prompts) { + infoReqsRxed++; + if (infoReqsRxed === 1) { + assert.equal(name, 'req 0'); + assert.equal(inst, 'instructions'); + assert.equal(lang, ''); + assert.deepEqual(prompts, [ + { prompt: 'Say something to req 0', echo: true } + ]); + process.nextTick(function() { + client.authInfoRes([ 'hello to req 0' ]); + }); + } else if (infoReqsRxed === 2) { + assert.equal(name, 'req 1'); + assert.equal(inst, 'new instructions'); + assert.equal(lang, ''); + assert.deepEqual(prompts, [ + { prompt: 'Say something to req 1', echo: true }, + { prompt: 'Say something else', echo: false } + ]); + process.nextTick(function() { + client.authInfoRes([ 'hello to req 1', 'something else' ]); + }); + } else { + throw new Error('Received too many info reqs: ' + infoReqsRxed); + } + }).authKeyboard('bob'); +} + +function mixedMethods(server, client) { + var expectedStages = [ + 'SERVER_SEES_PK_CHECK', + 'SERVER_SEES_PK_REQUEST', + 'SERVER_SEES_PASSWORD', + 'SERVER_SEES_KEYBOARD_INTERACTIVE', + 'CLIENT_SEES_PK_OK', + 'CLIENT_SEES_USERAUTH_FAILURE_PK', + 'CLIENT_SEES_USERAUTH_FAILURE_PASSWORD', + 'CLIENT_SEES_KEYBOARD_REQ', + 'SERVER_SEES_KEYBOARD_RES', + 'CLIENT_SEES_USERAUTH_SUCCESS', + ]; + + server.on('USERAUTH_REQUEST', function(name, service, method, data) { + assert.equal(name, 'bob'); + assert.equal(service, 'ssh-connection'); + var expectedStage = expectedStages.shift(); + switch (expectedStage) { + case 'SERVER_SEES_PK_CHECK': + assert.equal(method, 'publickey'); + assert.equal(data.signature, undefined); + return process.nextTick(function() { + server.authPKOK(data.keyAlgo, data.key); + }); + case 'SERVER_SEES_PK_REQUEST': + 
assert.equal(method, 'publickey'); + assert.notEqual(data.signature, undefined); + return process.nextTick(function() { + server.authFailure( + ['publickey', 'password', 'keyboard-interactive'], + false + ); + }); + case 'SERVER_SEES_PASSWORD': + assert.equal(method, 'password'); + assert.equal(data, 'seekrit'); + return process.nextTick(function() { + server.authFailure( + ['publickey', 'password', 'keyboard-interactive'], + false + ); + }); + case 'SERVER_SEES_KEYBOARD_INTERACTIVE': + assert.equal(method, 'keyboard-interactive'); + assert.equal(data, ''); + return process.nextTick(function() { + server.authInfoReq('Password required', 'Password prompt', [ + { prompt: 'Password:', echo: false } + ]); + }); + default: + throw new Error('Server saw USERAUTH_REQUEST ' + method + + ' but expected ' + expectedStage); + } + }).on('USERAUTH_INFO_RESPONSE', function(responses) { + assert.equal(expectedStages.shift(), 'SERVER_SEES_KEYBOARD_RES'); + assert.deepEqual(responses, [ 'seekrit' ]); + process.nextTick(function() { + server.authSuccess(); + }); + }); + + + client.on('USERAUTH_PK_OK', function() { + assert.equal(expectedStages.shift(), 'CLIENT_SEES_PK_OK'); + }).on('USERAUTH_FAILURE', function() { + var expectedStage = expectedStages.shift(); + if (expectedStage !== 'CLIENT_SEES_USERAUTH_FAILURE_PK' && + expectedStage !== 'CLIENT_SEES_USERAUTH_FAILURE_PASSWORD') { + throw new Error('Client saw USERAUTH_FAILURE but expected ' + + expectedStage); + } + }).on('USERAUTH_INFO_REQUEST', function(name, inst, lang, prompts) { + assert.equal(expectedStages.shift(), 'CLIENT_SEES_KEYBOARD_REQ'); + assert.equal(name, 'Password required'); + assert.equal(inst, 'Password prompt'); + assert.equal(lang, ''); + assert.deepEqual(prompts, [ { prompt: 'Password:', echo: false } ]); + process.nextTick(function() { + client.authInfoRes([ 'seekrit' ]); + }); + }).on('USERAUTH_SUCCESS', function() { + assert.equal(expectedStages.shift(), 'CLIENT_SEES_USERAUTH_SUCCESS'); + 
assert.equal(expectedStages.shift(), undefined); + next(); + }); + + // Silly to submit all these auths at once, but allowed by RFC4252 + client.authPK('bob', PARSED_CLIENT_PUB_KEY); + client.authPK('bob', PARSED_CLIENT_PUB_KEY, signWithClientKey); + client.authPassword('bob', 'seekrit'); + client.authKeyboard('bob'); +} + +var tests = [ + publickey, + keyboardInteractive, + // password // ssh2-streams can't generate a password change request + mixedMethods +]; + + +function next() { + if (Array.isArray(process._events.exit)) + process._events.exit = process._events.exit[1]; + if (++t === tests.length) + return; + + var v = tests[t]; + makePair(v); +} + +process.once('exit', function() { + assert(t === tests.length, + 'Only finished ' + t + '/' + tests.length + ' tests'); +}); + +next(); diff --git a/reverse_engineering/node_modules/ssh2-streams/test/test-sftp.js b/reverse_engineering/node_modules/ssh2-streams/test/test-sftp.js new file mode 100644 index 0000000..03a3093 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test-sftp.js @@ -0,0 +1,1274 @@ +var SFTPStream = require('../lib/sftp'); +var Stats = SFTPStream.Stats; +var STATUS_CODE = SFTPStream.STATUS_CODE; +var OPEN_MODE = SFTPStream.OPEN_MODE; + +var constants = require('constants'); +var basename = require('path').basename; +var assert = require('assert'); + +var group = basename(__filename, '.js') + '/'; +var t = -1; + +var tests = [ +// successful client requests + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/tmp/foo.txt'; + var handle_ = new Buffer('node.js'); + server.on('OPEN', function(id, path, pflags, attrs) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + assert(pflags === 
(OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.WRITE), + makeMsg(what, 'Wrong flags: ' + flagsToHuman(pflags))); + server.handle(id, handle_); + server.end(); + }); + client.open(path_, 'w', function(err, handle) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected open() error: ' + err)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + }); + }; + }, + what: 'open' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + server.on('CLOSE', function(id, handle) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.close(handle_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected close() error: ' + err)); + }); + }; + }, + what: 'close' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var expected = new Buffer('node.jsnode.jsnode.jsnode.jsnode.jsnode.js'); + var buffer = new Buffer(expected.length); + buffer.fill(0); + server.on('READ', function(id, handle, offset, len) { + assert(++self.state.requests <= 2, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + assert(offset === 5, makeMsg(what, 'Wrong read offset: ' + offset)); + assert(len === buffer.length, makeMsg(what, 'Wrong read len: ' + len)); + 
server.data(id, expected); + server.end(); + }); + client.readData(handle_, buffer, 0, buffer.length, 5, clientReadCb); + function clientReadCb(err, code) { + assert(++self.state.responses <= 2, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected readData() error: ' + err)); + assert.deepEqual(buffer, + expected, + makeMsg(what, 'read data mismatch')); + } + }; + }, + what: 'readData' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var buf = new Buffer('node.jsnode.jsnode.jsnode.jsnode.jsnode.js'); + server.on('WRITE', function(id, handle, offset, data) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + assert(offset === 5, makeMsg(what, 'Wrong write offset: ' + offset)); + assert.deepEqual(data, buf, makeMsg(what, 'write data mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.writeData(handle_, buf, 0, buf.length, 5, function(err, nb) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected writeData() error: ' + err)); + assert.equal(nb, buf.length); + }); + }; + }, + what: 'write' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var buf = new Buffer(3 * 32 * 1024); + server.on('WRITE', function(id, handle, offset, data) { + ++self.state.requests; + assert.equal(id, + self.state.requests - 1, + makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + assert.equal(offset, + 
(self.state.requests - 1) * 32 * 1024, + makeMsg(what, 'Wrong write offset: ' + offset)); + assert((offset + data.length) <= buf.length); + assert.deepEqual(data, + buf.slice(offset, offset + data.length), + makeMsg(what, 'write data mismatch')); + server.status(id, STATUS_CODE.OK); + if (self.state.requests === 3) + server.end(); + }); + client.writeData(handle_, buf, 0, buf.length, 0, function(err, nb) { + ++self.state.responses; + assert(!err, makeMsg(what, 'Unexpected writeData() error: ' + err)); + assert.equal(nb, buf.length); + }); + }; + }, + expected: { + requests: 3, + responses: 1 + }, + what: 'write (overflow)' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + var attrs_ = new Stats({ + size: 10 * 1024, + uid: 9001, + gid: 9001, + atime: (Date.now() / 1000) | 0, + mtime: (Date.now() / 1000) | 0 + }); + server.on('LSTAT', function(id, path) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + server.attrs(id, attrs_); + server.end(); + }); + client.lstat(path_, function(err, attrs) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected lstat() error: ' + err)); + assert.deepEqual(attrs, attrs_, makeMsg(what, 'attrs mismatch')); + }); + }; + }, + what: 'lstat' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var attrs_ = new Stats({ + size: 10 * 1024, + uid: 9001, + gid: 9001, + atime: (Date.now() / 1000) | 0, + mtime: (Date.now() / 1000) | 0 + }); + server.on('FSTAT', function(id, handle) { + 
assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.attrs(id, attrs_); + server.end(); + }); + client.fstat(handle_, function(err, attrs) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected fstat() error: ' + err)); + assert.deepEqual(attrs, attrs_, makeMsg(what, 'attrs mismatch')); + }); + }; + }, + what: 'fstat' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + var attrs_ = new Stats({ + uid: 9001, + gid: 9001, + atime: (Date.now() / 1000) | 0, + mtime: (Date.now() / 1000) | 0 + }); + server.on('SETSTAT', function(id, path, attrs) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + assert.deepEqual(attrs, attrs_, makeMsg(what, 'attrs mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.setstat(path_, attrs_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected setstat() error: ' + err)); + }); + }; + }, + what: 'setstat' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var attrs_ = new Stats({ + uid: 9001, + gid: 9001, + atime: (Date.now() / 1000) | 0, + mtime: (Date.now() / 1000) | 0 + }); + server.on('FSETSTAT', function(id, handle, attrs) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 
0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + assert.deepEqual(attrs, attrs_, makeMsg(what, 'attrs mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.fsetstat(handle_, attrs_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected fsetstat() error: ' + err)); + }); + }; + }, + what: 'fsetstat' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var path_ = '/tmp'; + server.on('OPENDIR', function(id, path) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + server.handle(id, handle_); + server.end(); + }); + client.opendir(path_, function(err, handle) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected opendir() error: ' + err)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + }); + }; + }, + what: 'opendir' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var list_ = [ + { filename: '.', + longname: 'drwxr-xr-x 56 nodejs nodejs 4096 Nov 10 01:05 .', + attrs: new Stats({ + mode: 0755 | constants.S_IFDIR, + size: 4096, + uid: 9001, + gid: 8001, + atime: 1415599549, + mtime: 1415599590 + }) + }, + { filename: '..', + longname: 'drwxr-xr-x 4 root root 4096 May 16 2013 ..', + attrs: new Stats({ + mode: 0755 | constants.S_IFDIR, + size: 4096, + uid: 0, + gid: 0, + atime: 1368729954, + mtime: 1368729999 + }) + 
}, + { filename: 'foo', + longname: 'drwxrwxrwx 2 nodejs nodejs 4096 Mar 8 2009 foo', + attrs: new Stats({ + mode: 0777 | constants.S_IFDIR, + size: 4096, + uid: 9001, + gid: 8001, + atime: 1368729954, + mtime: 1368729999 + }) + }, + { filename: 'bar', + longname: '-rw-r--r-- 1 nodejs nodejs 513901992 Dec 4 2009 bar', + attrs: new Stats({ + mode: 0644 | constants.S_IFREG, + size: 513901992, + uid: 9001, + gid: 8001, + atime: 1259972199, + mtime: 1259972199 + }) + } + ]; + server.on('READDIR', function(id, handle) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.name(id, list_); + server.end(); + }); + client.readdir(handle_, function(err, list) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected readdir() error: ' + err)); + assert.deepEqual(list, + list_.slice(2), + makeMsg(what, 'dir list mismatch')); + }); + }; + }, + what: 'readdir' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + var list_ = [ + { filename: '.', + longname: 'drwxr-xr-x 56 nodejs nodejs 4096 Nov 10 01:05 .', + attrs: new Stats({ + mode: 0755 | constants.S_IFDIR, + size: 4096, + uid: 9001, + gid: 8001, + atime: 1415599549, + mtime: 1415599590 + }) + }, + { filename: '..', + longname: 'drwxr-xr-x 4 root root 4096 May 16 2013 ..', + attrs: new Stats({ + mode: 0755 | constants.S_IFDIR, + size: 4096, + uid: 0, + gid: 0, + atime: 1368729954, + mtime: 1368729999 + }) + }, + { filename: 'foo', + longname: 'drwxrwxrwx 2 nodejs nodejs 4096 Mar 8 2009 foo', + attrs: new Stats({ + mode: 0777 | constants.S_IFDIR, + size: 4096, + uid: 9001, + gid: 8001, + atime: 1368729954, + mtime: 1368729999 + }) 
+ }, + { filename: 'bar', + longname: '-rw-r--r-- 1 nodejs nodejs 513901992 Dec 4 2009 bar', + attrs: new Stats({ + mode: 0644 | constants.S_IFREG, + size: 513901992, + uid: 9001, + gid: 8001, + atime: 1259972199, + mtime: 1259972199 + }) + } + ]; + server.on('READDIR', function(id, handle) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.name(id, list_); + server.end(); + }); + client.readdir(handle_, { full: true }, function(err, list) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected readdir() error: ' + err)); + assert.deepEqual(list, list_, makeMsg(what, 'dir list mismatch')); + }); + }; + }, + what: 'readdir (full)' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + server.on('REMOVE', function(id, path) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.unlink(path_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected unlink() error: ' + err)); + }); + }; + }, + what: 'remove' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + server.on('MKDIR', function(id, path) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: 
' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.mkdir(path_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected mkdir() error: ' + err)); + }); + }; + }, + what: 'mkdir' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + server.on('RMDIR', function(id, path) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.rmdir(path_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected rmdir() error: ' + err)); + }); + }; + }, + what: 'rmdir' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + var name_ = { filename: '/tmp/foo' }; + server.on('REALPATH', function(id, path) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + server.name(id, name_); + server.end(); + }); + client.realpath(path_, function(err, name) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected realpath() error: ' + err)); + assert.deepEqual(name, name_.filename, makeMsg(what, 'name mismatch')); + }); + }; + }, + what: 'realpath' + }, + { run: function() { + setup(this); + + var self = 
this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + var attrs_ = new Stats({ + size: 10 * 1024, + uid: 9001, + gid: 9001, + atime: (Date.now() / 1000) | 0, + mtime: (Date.now() / 1000) | 0 + }); + server.on('STAT', function(id, path) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + server.attrs(id, attrs_); + server.end(); + }); + client.stat(path_, function(err, attrs) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected stat() error: ' + err)); + assert.deepEqual(attrs, attrs_, makeMsg(what, 'attrs mismatch')); + }); + }; + }, + what: 'stat' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var oldPath_ = '/foo/bar/baz'; + var newPath_ = '/tmp/foo'; + server.on('RENAME', function(id, oldPath, newPath) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(oldPath === oldPath_, + makeMsg(what, 'Wrong old path: ' + oldPath)); + assert(newPath === newPath_, + makeMsg(what, 'Wrong new path: ' + newPath)); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.rename(oldPath_, newPath_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected rename() error: ' + err)); + }); + }; + }, + what: 'rename' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var linkPath_ = '/foo/bar/baz'; + var name = { 
filename: '/tmp/foo' }; + server.on('READLINK', function(id, linkPath) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(linkPath === linkPath_, + makeMsg(what, 'Wrong link path: ' + linkPath)); + server.name(id, name); + server.end(); + }); + client.readlink(linkPath_, function(err, targetPath) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected readlink() error: ' + err)); + assert(targetPath === name.filename, + makeMsg(what, 'Wrong target path: ' + targetPath)); + }); + }; + }, + what: 'readlink' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var linkPath_ = '/foo/bar/baz'; + var targetPath_ = '/tmp/foo'; + server.on('SYMLINK', function(id, linkPath, targetPath) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(linkPath === linkPath_, + makeMsg(what, 'Wrong link path: ' + linkPath)); + assert(targetPath === targetPath_, + makeMsg(what, 'Wrong target path: ' + targetPath)); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + client.symlink(targetPath_, linkPath_, function(err) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(!err, makeMsg(what, 'Unexpected symlink() error: ' + err)); + }); + }; + }, + what: 'symlink' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + var handle_ = new Buffer('hi mom!'); + var data_ = new Buffer('hello world'); + server.once('OPEN', function(id, path, pflags, attrs) { + assert(id === 0, makeMsg(what, 'Wrong request id: 
' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + assert(pflags === OPEN_MODE.READ, + makeMsg(what, 'Wrong flags: ' + flagsToHuman(pflags))); + server.handle(id, handle_); + }).once('FSTAT', function(id, handle) { + assert(id === 1, makeMsg(what, 'Wrong request id: ' + id)); + var attrs = new Stats({ + size: data_.length, + uid: 9001, + gid: 9001, + atime: (Date.now() / 1000) | 0, + mtime: (Date.now() / 1000) | 0 + }); + server.attrs(id, attrs); + }).once('READ', function(id, handle, offset, len) { + assert(id === 2, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + assert(offset === 0, makeMsg(what, 'Wrong read offset: ' + offset)); + server.data(id, data_); + }).once('CLOSE', function(id, handle) { + ++self.state.requests; + assert(id === 3, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + var buf = []; + client.readFile(path_, function(err, buf) { + ++self.state.responses; + assert(!err, makeMsg(what, 'Unexpected error: ' + err)); + assert.deepEqual(buf, data_, makeMsg(what, 'data mismatch')); + }); + }; + }, + what: 'readFile' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/foo/bar/baz'; + var handle_ = new Buffer('hi mom!'); + var data_ = new Buffer('hello world'); + var reads = 0; + server.once('OPEN', function(id, path, pflags, attrs) { + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + assert(pflags === OPEN_MODE.READ, + makeMsg(what, 'Wrong flags: ' + flagsToHuman(pflags))); + server.handle(id, handle_); + }).once('FSTAT', function(id, handle) { + assert(id === 1, makeMsg(what, 'Wrong request id: ' + id)); + var attrs = new 
Stats({ + uid: 9001, + gid: 9001, + atime: (Date.now() / 1000) | 0, + mtime: (Date.now() / 1000) | 0 + }); + server.attrs(id, attrs); + }).on('READ', function(id, handle, offset, len) { + assert(++reads <= 2, makeMsg(what, 'Saw too many READs')); + assert(id === 2 || id === 3, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + switch(id) { + case 2: + assert(offset === 0, makeMsg(what, 'Wrong read offset for first read: ' + offset)); + server.data(id, data_); + break; + case 3: + assert(offset === data_.length, makeMsg(what, 'Wrong read offset for second read: ' + offset)); + server.status(id, STATUS_CODE.EOF); + break; + } + }).once('CLOSE', function(id, handle) { + ++self.state.requests; + assert(id === 4, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + var buf = []; + client.readFile(path_, function(err, buf) { + ++self.state.responses; + assert(!err, makeMsg(what, 'Unexpected error: ' + err)); + assert.deepEqual(buf, data_, makeMsg(what, 'data mismatch')); + }); + }; + }, + what: 'readFile (no size from fstat)' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var opens = 0; + var reads = 0; + var closes = 0; + var path_ = '/foo/bar/baz'; + var handle_ = new Buffer('hi mom!'); + var data_ = new Buffer('hello world'); + server.on('OPEN', function(id, path, pflags, attrs) { + assert(++opens === 1, makeMsg(what, 'Saw too many OPENs')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + assert(pflags === OPEN_MODE.READ, + makeMsg(what, 'Wrong flags: ' + flagsToHuman(pflags))); + server.handle(id, handle_); + }).on('READ', function(id, handle, offset, len) { + assert(++reads <= 
2, makeMsg(what, 'Saw too many READs')); + assert(id === reads, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + if (reads === 1) { + assert(offset === 0, makeMsg(what, 'Wrong read offset: ' + offset)); + server.data(id, data_); + } else + server.status(id, STATUS_CODE.EOF); + }).on('CLOSE', function(id, handle) { + ++self.state.requests; + assert(++closes === 1, makeMsg(what, 'Saw too many CLOSEs')); + assert(id === 3, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }); + var buf = []; + client.createReadStream(path_).on('readable', function() { + var chunk; + while ((chunk = this.read()) !== null) { + buf.push(chunk); + } + }).on('end', function() { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + buf = Buffer.concat(buf); + assert.deepEqual(buf, data_, makeMsg(what, 'data mismatch')); + }); + }; + }, + what: 'ReadStream' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var opens = 0; + var path_ = '/foo/bar/baz'; + var error; + server.on('OPEN', function(id, path, pflags, attrs) { + ++opens; + ++self.state.requests; + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + assert(pflags === OPEN_MODE.READ, + makeMsg(what, 'Wrong flags: ' + flagsToHuman(pflags))); + server.status(id, STATUS_CODE.NO_SUCH_FILE); + server.end(); + }); + client.createReadStream(path_).on('error', function(err) { + error = err; + }).on('close', function() { + assert(opens === 1, makeMsg(what, 'Saw ' + opens + ' OPENs')); + assert(error, makeMsg(what, 'Expected error')); + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + }); + }; + }, + 
what: 'ReadStream (error)' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var opens = 0; + var writes = 0; + var closes = 0; + var fsetstat = false; + var path_ = '/foo/bar/baz'; + var handle_ = new Buffer('hi mom!'); + var data_ = new Buffer('hello world'); + var expFlags = OPEN_MODE.TRUNC | OPEN_MODE.CREAT | OPEN_MODE.WRITE; + server.on('OPEN', function(id, path, pflags, attrs) { + assert(++opens === 1, makeMsg(what, 'Saw too many OPENs')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert(path === path_, makeMsg(what, 'Wrong path: ' + path)); + assert(pflags === expFlags, + makeMsg(what, 'Wrong flags: ' + flagsToHuman(pflags))); + server.handle(id, handle_); + }).once('FSETSTAT', function(id, handle, attrs) { + fsetstat = true; + assert(id === 1, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + assert.strictEqual(attrs.mode, + parseInt('0666', 8), + makeMsg(what, 'Wrong file mode')); + server.status(id, STATUS_CODE.OK); + }).on('WRITE', function(id, handle, offset, data) { + assert(++writes <= 3, makeMsg(what, 'Saw too many WRITEs')); + assert(id === writes + 1, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + assert(offset === ((writes - 1) * data_.length), + makeMsg(what, 'Wrong write offset: ' + offset)); + assert.deepEqual(data, data_, makeMsg(what, 'Wrong data')); + server.status(id, STATUS_CODE.OK); + }).on('CLOSE', function(id, handle) { + ++self.state.requests; + assert(++closes === 1, makeMsg(what, 'Saw too many CLOSEs')); + assert(id === 5, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.status(id, STATUS_CODE.OK); + server.end(); + }).on('end', function() { + assert(++self.state.responses === 1, + 
makeMsg(what, 'Saw too many responses')); + assert(opens === 1, makeMsg(what, 'Wrong OPEN count')); + assert(writes === 3, makeMsg(what, 'Wrong WRITE count')); + assert(closes === 1, makeMsg(what, 'Wrong CLOSE count')); + assert(fsetstat, makeMsg(what, 'Expected FSETSTAT')); + }); + + var writer = client.createWriteStream(path_); + if (writer.cork) + writer.cork(); + writer.write(data_); + writer.write(data_); + writer.write(data_); + if (writer.uncork) + writer.uncork(); + writer.end(); + }; + }, + what: 'WriteStream' + }, + +// other client request scenarios + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var handle_ = new Buffer('node.js'); + server.on('READDIR', function(id, handle) { + assert(++self.state.requests === 1, + makeMsg(what, 'Saw too many requests')); + assert(id === 0, makeMsg(what, 'Wrong request id: ' + id)); + assert.deepEqual(handle, handle_, makeMsg(what, 'handle mismatch')); + server.status(id, STATUS_CODE.EOF); + server.end(); + }); + client.readdir(handle_, function(err, list) { + assert(++self.state.responses === 1, + makeMsg(what, 'Saw too many responses')); + assert(err && err.code === STATUS_CODE.EOF, + makeMsg(what, 'Expected EOF, got: ' + err)); + }); + }; + }, + what: 'readdir (EOF)' + }, + { run: function() { + setup(this); + + var self = this; + var what = this.what; + var client = this.client; + var server = this.server; + + this.onReady = function() { + var path_ = '/tmp/foo.txt'; + var reqs = 0; + var continues = 0; + + client.unpipe(server); + + function clientCb(err, handle) { + assert(++self.state.responses <= reqs, + makeMsg(what, 'Saw too many responses')); + if (self.state.responses === reqs) { + assert(continues === 1, makeMsg(what, 'no continue event seen')); + server.end(); + } + } + + client.on('continue', function() { + assert(++continues === 1, makeMsg(what, 'saw > 1 continue event')); + }); + 
+ while (true) { + ++reqs; + if (!client.open(path_, 'w', clientCb)) + break; + } + + client.pipe(server); + }; + }, + expected: { + requests: -1, + responses: -1 + }, + what: '"continue" event after push() === false' + }, + { run: function() { + var self = this; + var client = new SFTPStream(); + client.once('ready', function() { + client.open('/foo/bar', 'w', function(err, handle) { + assert(err, 'Expected error'); + assert.strictEqual(err.code, 4); + assert.strictEqual(err.message, 'Uh oh'); + assert.strictEqual(err.lang, ''); + next(); + }); + client.write(new Buffer([ + 0, 0, 0, 18, + 101, + 0, 0, 0, 0, + 0, 0, 0, SFTPStream.STATUS_CODE.FAILURE, + 0, 0, 0, 5, 85, 104, 32, 111, 104 + ])); + }); + client.write(new Buffer([ + 0, 0, 0, 5, + 2, + 0, 0, 0, 3 + ])); + }, + what: 'Can parse status response without language' + }, + { run: function() { + var self = this; + var client = new SFTPStream(); + client.once('ready', function() { + client.open('/foo/bar', 'w', function(err, handle) { + assert(err, 'Expected error'); + assert.strictEqual(err.code, 4); + assert.strictEqual(err.message, 'Failure'); + assert.strictEqual(err.lang, ''); + next(); + }); + client.write(new Buffer([ + 0, 0, 0, 9, + 101, + 0, 0, 0, 0, + 0, 0, 0, SFTPStream.STATUS_CODE.FAILURE + ])); + }); + client.write(new Buffer([ + 0, 0, 0, 5, + 2, + 0, 0, 0, 3 + ])); + }, + what: 'Can parse status response without message' + }, + { run: function() { + var self = this; + var err; + var client = new SFTPStream(); + client.once('ready', function() { + assert(false, 'Handshake should not succeed'); + }).once('error', function(err_) { + err = err_; + }).once('end', function() { + assert.strictEqual(err && err.message, + 'Unexpected packet before version'); + next(); + }); + client.write(new Buffer([ + 1, 2, 3, 4, + 5, + 6, 7, 8, 9 + ])); + }, + what: 'End SFTP stream on bad handshake (client)' + }, + { run: function() { + var self = this; + var err; + var client = new SFTPStream({ server: true }); + 
client.once('ready', function() { + assert(false, 'Handshake should not succeed'); + }).once('error', function(err_) { + err = err_; + }).once('end', function() { + assert.strictEqual(err && err.message, + 'Unexpected packet before init'); + next(); + }); + client.write(new Buffer([ + 1, 2, 3, 4, + 5, + 6, 7, 8, 9 + ])); + }, + what: 'End SFTP stream on bad handshake (server)' + }, +]; + +function setup(self) { + var expectedRequests = (self.expected && self.expected.requests) || 1; + var expectedResponses = (self.expected && self.expected.responses) || 1; + var clientEnded = false; + var serverEnded = false; + + self.state = { + clientReady: false, + serverReady: false, + requests: 0, + responses: 0 + }; + + self.client = new SFTPStream(); + self.server = new SFTPStream({ server: true }); + + self.server.on('error', onError) + .on('ready', onReady) + .on('end', onEnd); + self.client.on('error', onError) + .on('ready', onReady) + .on('end', onEnd); + + function onError(err) { + var which = (this === self.server ? 
'server' : 'client'); + assert(false, makeMsg(self.what, 'Unexpected ' + which + ' error: ' + err)); + } + function onReady() { + if (this === self.client) { + assert(!self.state.clientReady, + makeMsg(self.what, 'Received multiple ready events for client')); + self.state.clientReady = true; + } else { + assert(!self.state.serverReady, + makeMsg(self.what, 'Received multiple ready events for server')); + self.state.serverReady = true; + } + if (self.state.clientReady && self.state.serverReady) + self.onReady && self.onReady(); + } + function onEnd() { + if (this === self.client) { + assert(!clientEnded, + makeMsg(self.what, 'Received multiple close events for client')); + clientEnded = true; + } else { + assert(!serverEnded, + makeMsg(self.what, 'Received multiple close events for server')); + serverEnded = true; + } + if (clientEnded && serverEnded) { + var msg; + if (expectedRequests > 0) { + msg = 'Expected ' + expectedRequests + ' request(s) but received ' + + self.state.requests; + assert(self.state.requests === expectedRequests, + makeMsg(self.what, msg)); + } + if (expectedResponses > 0) { + msg = 'Expected ' + expectedResponses + ' response(s) but received ' + + self.state.responses; + assert(self.state.responses === expectedResponses, + makeMsg(self.what, msg)); + } + next(); + } + } + + process.nextTick(function() { + self.client.pipe(self.server).pipe(self.client); + }); +} + +function flagsToHuman(flags) { + var ret = []; + + for (var i = 0, keys = Object.keys(OPEN_MODE), len = keys.length; i < len; ++i) + if (flags & OPEN_MODE[keys[i]]) + ret.push(keys[i]); + + return ret.join(' | '); +} + +function next() { + if (++t === tests.length) + return; + + var v = tests[t]; + v.run.call(v); +} + +function makeMsg(what, msg) { + return '[' + group + what + ']: ' + msg; +} + +process.once('exit', function() { + assert(t === tests.length, + makeMsg('_exit', + 'Only finished ' + t + '/' + tests.length + ' tests')); +}); + +next(); diff --git 
a/reverse_engineering/node_modules/ssh2-streams/test/test-ssh.js b/reverse_engineering/node_modules/ssh2-streams/test/test-ssh.js new file mode 100644 index 0000000..5d61522 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test-ssh.js @@ -0,0 +1,143 @@ +var SSH2Stream = require('../lib/ssh'); +var utils = require('../lib/utils'); +var parseKey = utils.parseKey; +var genPubKey = utils.genPublicKey; + +var basename = require('path').basename; +var assert_ = require('assert'); +var inherits = require('util').inherits; +var inspect = require('util').inspect; +var TransformStream = require('stream').Transform; +var fs = require('fs'); + +var group = basename(__filename, '.js') + '/'; +var t = -1; +var SERVER_KEY = fs.readFileSync(__dirname + '/fixtures/ssh_host_rsa_key'); +var HOST_KEYS = { 'ssh-rsa': makeServerKey(SERVER_KEY) }; + +function SimpleStream() { + TransformStream.call(this); + this.buffer = ''; +} +inherits(SimpleStream, TransformStream); +SimpleStream.prototype._transform = function(chunk, encoding, cb) { + this.buffer += chunk.toString('binary'); + cb(null, chunk); +}; + +var tests = [ + // client-side tests + { run: function() { + var algos = ['ssh-dss', 'ssh-rsa', 'ecdsa-sha2-nistp521']; + var client = new SSH2Stream({ + algorithms: { + serverHostKey: algos + } + }); + var clientBufStream = new SimpleStream(); + var clientReady = false; + var server = new SSH2Stream({ + server: true, + hostKeys: HOST_KEYS + }); + var serverBufStream = new SimpleStream(); + var serverReady = false; + + function onNEWKEYS() { + if (this === client) { + assert(!clientReady, 'Already received client NEWKEYS event'); + clientReady = true; + } else { + assert(!serverReady, 'Already received server NEWKEYS event'); + serverReady = true; + } + if (clientReady && serverReady) { + var traffic = clientBufStream.buffer; + var algoList = algos.join(','); + var re = new RegExp('\x00\x00\x00' + + hexByte(algoList.length) + + algoList); + assert(re.test(traffic), 
'Unexpected client algorithms'); + + traffic = serverBufStream.buffer; + assert(/\x00\x00\x00\x07ssh-rsa/.test(traffic), + 'Unexpected server algorithms'); + + next(); + } + } + + client.on('NEWKEYS', onNEWKEYS); + server.on('NEWKEYS', onNEWKEYS); + + client.pipe(clientBufStream) + .pipe(server) + .pipe(serverBufStream) + .pipe(client); + }, + what: 'Custom algorithms' + }, + { run: function() { + var serverIdent = 'testing \t'; + var expectedFullIdent = 'SSH-2.0-' + serverIdent; + + var client = new SSH2Stream({}); + client.on('header', function(header) { + assert(header.identRaw === expectedFullIdent, + '\nSaw: ' + inspect(header.identRaw) + '\n' + + 'Expected: ' + inspect(expectedFullIdent)); + next(); + }); + + var server = new SSH2Stream({ + server: true, + hostKeys: HOST_KEYS, + ident: serverIdent + }); + + client.pipe(server).pipe(client); + }, + what: 'Remote ident is not trimmed' + } +]; + +function makeServerKey(raw) { + var privateKey = parseKey(raw); + return { + privateKey: privateKey, + publicKey: genPubKey(privateKey) + }; +} + +function hexByte(n) { + return String.fromCharCode(n); +} + +function assert(expression, msg) { + msg || (msg = 'failed assertion'); + assert_(expression, makeMsg(tests[t].what, msg)); +} + + + +function next() { + if (Array.isArray(process._events.exit)) + process._events.exit = process._events.exit[1]; + if (++t === tests.length) + return; + + var v = tests[t]; + v.run.call(v); +} + +function makeMsg(what, msg) { + return '[' + group + what + ']: ' + msg; +} + +process.once('exit', function() { + assert_(t === tests.length, + makeMsg('_exit', + 'Only finished ' + t + '/' + tests.length + ' tests')); +}); + +next(); diff --git a/reverse_engineering/node_modules/ssh2-streams/test/test-utils.js b/reverse_engineering/node_modules/ssh2-streams/test/test-utils.js new file mode 100644 index 0000000..2949ab3 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test-utils.js @@ -0,0 +1,678 @@ +var utils = 
require('../lib/utils'); + +var fs = require('fs'); +var path = require('path'); +var assert = require('assert'); + +var t = -1; +var group = path.basename(__filename, '.js') + '/'; +var fixturesdir = path.join(__dirname, 'fixtures'); + +var tests = [ + { run: function() { + var what = this.what; + var r; + + assert.strictEqual(r = utils.readInt(new Buffer([0,0,0]), 0), + false, + makeMsg(what, 'Wrong result: ' + r)); + next(); + }, + what: 'readInt - without stream callback - failure #1' + }, + { run: function() { + var what = this.what; + var r; + + assert.strictEqual(r = utils.readInt(new Buffer([]), 0), + false, + makeMsg(what, 'Wrong result: ' + r)); + next(); + }, + what: 'readInt - without stream callback - failure #2' + }, + { run: function() { + var what = this.what; + var r; + + assert.strictEqual(r = utils.readInt(new Buffer([0,0,0,5]), 0), + 5, + makeMsg(what, 'Wrong result: ' + r)); + next(); + }, + what: 'readInt - without stream callback - success' + }, + { run: function() { + var what = this.what; + var callback = function() {}; + var stream = { + _cleanup: function(cb) { + cleanupCalled = true; + assert(cb === callback, makeMsg(what, 'Wrong callback')); + } + }; + var cleanupCalled = false; + var r; + + assert.strictEqual(r = utils.readInt(new Buffer([]), 0, stream, callback), + false, + makeMsg(what, 'Wrong result: ' + r)); + assert(cleanupCalled, makeMsg(what, 'Cleanup not called')); + next(); + }, + what: 'readInt - with stream callback' + }, + { run: function() { + var what = this.what; + var r; + + assert.strictEqual(r = utils.readString(new Buffer([0,0,0]), 0), + false, + makeMsg(what, 'Wrong result: ' + r)); + next(); + }, + what: 'readString - without stream callback - bad length #1' + }, + { run: function() { + var what = this.what; + var r; + + assert.strictEqual(r = utils.readString(new Buffer([]), 0), + false, + makeMsg(what, 'Wrong result: ' + r)); + next(); + }, + what: 'readString - without stream callback - bad length #2' + }, + { 
run: function() { + var what = this.what; + var r; + + assert.deepEqual(r = utils.readString(new Buffer([0,0,0,1,5]), 0), + new Buffer([5]), + makeMsg(what, 'Wrong result: ' + r)); + next(); + }, + what: 'readString - without stream callback - success' + }, + { run: function() { + var what = this.what; + var r; + + assert.deepEqual(r = utils.readString(new Buffer([0,0,0,1,33]), 0, 'ascii'), + '!', + makeMsg(what, 'Wrong result: ' + r)); + next(); + }, + what: 'readString - without stream callback - encoding' + }, + { run: function() { + var what = this.what; + var callback = function() {}; + var stream = { + _cleanup: function(cb) { + cleanupCalled = true; + assert(cb === callback, makeMsg(what, 'Wrong callback')); + } + }; + var cleanupCalled = false; + var r; + + assert.deepEqual(r = utils.readString(new Buffer([0,0,0,1]), + 0, + stream, + callback), + false, + makeMsg(what, 'Wrong result: ' + r)); + assert(cleanupCalled, makeMsg(what, 'Cleanup not called')); + next(); + }, + what: 'readString - with stream callback - no encoding' + }, + { run: function() { + var what = this.what; + var callback = function() {}; + var stream = { + _cleanup: function(cb) { + cleanupCalled = true; + assert(cb === callback, makeMsg(what, 'Wrong callback')); + } + }; + var cleanupCalled = false; + var r; + + assert.deepEqual(r = utils.readString(new Buffer([0,0,0,1]), + 0, + 'ascii', + stream, + callback), + false, + makeMsg(what, 'Wrong result: ' + r)); + assert(cleanupCalled, makeMsg(what, 'Cleanup not called')); + next(); + }, + what: 'readString - with stream callback - encoding' + }, + { run: function() { + var what = this.what; + var filepath = fixturesdir + '/encrypted-rsa.ppk'; + var passphrase = 'node.js'; + var keyInfo = utils.parseKey(fs.readFileSync(filepath)); + + utils.decryptKey(keyInfo, passphrase); + + var expPrivOrig = new Buffer([ + 45,45,45,45,45,66,69,71,73,78,32,82,83,65,32,80,82,73,86,65,84,69,32,75, + 
69,89,45,45,45,45,45,10,77,73,73,67,87,81,73,66,65,65,75,66,103,71,115, + 70,89,82,77,66,85,68,73,109,97,52,48,98,110,101,80,66,77,48,79,86,115, + 104,119,102,109,87,115,83,57,122,72,113,88,72,108,115,122,111,81,113,68, + 82,101,89,52,103,102,51,10,112,87,112,83,78,76,116,53,70,70,78,77,80,50, + 87,107,69,68,121,70,105,71,83,115,54,77,55,72,51,102,56,108,89,72,43,108, + 120,85,51,122,56,72,99,78,103,101,121,70,80,48,52,70,98,85,77,75,83,115, + 51,67,54,51,97,108,10,115,110,97,104,107,52,71,75,117,71,69,67,118,55,71, + 112,89,87,118,102,110,110,85,87,112,118,78,111,73,104,101,107,52,113,53, + 117,118,103,82,119,106,65,75,75,107,71,88,111,47,69,116,82,74,56,101,68, + 65,103,69,108,65,111,71,65,10,85,43,71,102,72,76,118,88,69,111,122,81,49, + 109,72,65,56,77,102,99,69,109,67,83,104,76,55,83,77,86,81,78,50,119,80, + 76,56,72,102,103,73,109,89,108,55,43,97,72,112,87,69,56,100,101,49,110, + 109,100,116,119,121,54,112,50,10,52,80,89,50,80,85,89,81,57,80,89,53,55, + 105,51,122,76,56,78,90,100,56,87,81,55,82,103,48,82,66,72,68,108,110,100, + 97,70,101,70,52,69,102,48,117,76,98,111,113,89,100,47,120,78,48,114,122, + 102,121,53,53,122,55,104,87,10,79,76,43,56,86,104,111,120,84,114,66,85, + 118,118,101,79,104,90,119,66,80,107,79,101,72,102,120,109,107,86,122,51, + 120,98,98,114,103,51,107,78,108,111,48,67,81,81,68,74,89,80,75,116,67, + 115,47,108,52,54,75,74,109,78,51,108,10,85,65,78,100,73,52,81,73,117,87, + 81,43,90,108,108,122,55,112,57,52,70,102,100,111,116,110,107,118,113,71, + 43,43,66,112,49,119,79,113,74,83,67,105,104,54,85,86,105,119,76,102,118, + 112,78,90,116,71,77,67,116,107,52,54,87,78,10,104,99,48,122,65,107,69,65, + 105,65,121,78,52,87,85,115,47,48,120,52,87,111,118,71,57,53,54,74,49,65, + 43,117,83,69,75,101,87,122,117,113,102,112,71,71,98,87,103,90,57,88,102, + 110,80,110,107,43,49,65,108,56,70,79,87,49,10,116,117,57,87,87,114,77,80, + 73,97,118,81,110,90,87,47,100,88,120,104,107,101,78,87,84,72,55,56,99,81, + 
74,66,65,76,107,77,43,113,122,90,103,77,86,112,90,79,48,107,115,68,113, + 65,52,72,56,90,116,53,108,81,97,102,81,109,10,115,120,67,87,70,102,43, + 108,101,53,67,110,114,97,70,113,87,78,103,104,119,82,115,70,99,112,67,84, + 116,110,52,56,54,98,97,109,121,56,57,104,115,85,100,113,105,76,50,83,54, + 121,103,97,70,111,69,67,81,70,68,107,51,114,49,101,10,119,77,56,109,106, + 77,65,51,98,50,76,77,43,65,71,77,121,72,51,43,71,80,102,53,57,113,119, + 102,76,86,88,80,77,103,101,84,90,117,98,103,84,116,55,119,52,102,54,87, + 98,65,118,111,81,83,56,67,114,119,48,97,68,86,98,72,10,118,102,76,85,86, + 98,67,119,114,57,112,49,66,77,48,67,81,70,83,66,106,67,97,47,102,122,101, + 73,67,86,107,80,70,66,97,75,81,85,109,88,106,81,51,73,99,80,84,79,114,57, + 48,109,83,65,105,80,110,65,65,112,112,83,119,84,10,106,53,83,89,83,102, + 69,57,114,83,86,98,43,69,104,81,48,104,107,50,86,75,87,73,102,111,99,78, + 72,66,68,49,77,65,78,57,122,98,52,61,10,45,45,45,45,45,69,78,68,32,82,83, + 65,32,80,82,73,86,65,84,69,32,75,69,89,45,45,45,45,45 + ]); + assert(keyInfo.ppk === true, makeMsg(what, 'Expected PPK flag')); + assert(keyInfo._converted === true, + makeMsg(what, 'Expected automatic private PEM generation')); + assert(keyInfo._macresult === true, + makeMsg(what, 'Expected successful MAC verification')); + assert(keyInfo._decrypted === true, + makeMsg(what, 'Expected decrypted flag')); + assert.deepEqual(keyInfo.privateOrig, + expPrivOrig, + makeMsg(what, 'Decrypted private PEM data mismatch')); + next(); + }, + what: 'decryptKey - with encrypted RSA PPK' + }, + { run: function() { + var what = this.what; + var filepath = fixturesdir + '/encrypted-dsa.ppk'; + var passphrase = 'node.js'; + var keyInfo = utils.parseKey(fs.readFileSync(filepath)); + + utils.decryptKey(keyInfo, passphrase); + + var expPrivOrig = new Buffer([ + 45,45,45,45,45,66,69,71,73,78,32,68,83,65,32,80,82,73,86,65,84,69,32,75, + 69,89,45,45,45,45,45,10,77,73,73,66,117,103,73,66,65,65,75,66,103,81,67, + 
90,57,105,80,71,72,110,48,97,78,119,98,66,72,111,112,48,76,102,67,107, + 79,72,66,77,103,75,119,76,79,50,80,49,117,57,57,54,69,85,109,68,105,77, + 49,104,100,83,98,116,10,100,117,77,114,67,113,53,111,78,113,74,76,47,116, + 79,81,109,72,73,49,100,50,75,101,65,77,48,72,113,74,109,65,74,89,74,103, + 102,43,56,81,104,74,49,109,104,74,56,81,115,65,77,90,113,54,121,84,74, + 106,54,53,77,68,89,120,10,122,105,73,117,56,106,79,85,68,104,80,100,67, + 68,80,48,80,105,67,81,79,66,68,119,88,48,109,47,108,54,47,72,50,73,97,54, + 101,100,121,106,82,49,85,112,51,78,105,112,68,113,113,97,78,104,98,67,73, + 119,73,86,65,76,103,50,10,85,47,110,81,97,114,74,83,113,114,89,72,122,90, + 87,72,47,68,109,80,100,80,80,66,65,111,71,65,72,107,104,113,74,118,83, + 121,122,88,99,51,77,65,104,53,120,110,56,83,106,90,120,77,57,43,101,83, + 105,69,119,65,48,56,89,105,10,75,81,98,53,48,70,118,110,103,120,56,69,76, + 121,77,79,108,100,106,110,79,57,50,121,103,114,117,87,89,113,50,90,105, + 68,70,117,99,79,105,111,48,70,99,74,76,107,65,97,66,102,83,113,75,118,57, + 114,117,108,88,110,114,55,83,47,10,97,81,43,107,119,99,48,105,122,70,99, + 79,97,86,100,122,53,104,79,80,119,118,51,105,52,109,108,77,87,83,121,66, + 51,87,56,54,97,106,53,65,76,119,70,65,97,49,121,112,73,57,73,111,56,51, + 68,99,119,113,100,88,55,104,102,66,10,57,75,98,48,102,77,107,67,103,89, + 65,109,118,86,43,107,113,87,104,85,103,68,89,119,78,78,122,49,113,68,97, + 111,83,56,88,100,115,79,112,111,110,117,116,90,47,48,115,116,82,81,54,54, + 109,75,65,121,56,107,78,86,78,78,81,54,10,111,85,120,49,88,70,108,49,87, + 85,116,52,105,121,70,89,47,50,82,122,50,102,90,104,76,122,53,47,84,98,90, + 82,75,53,121,103,111,54,54,54,87,103,110,120,66,47,85,100,52,71,65,120, + 47,66,80,81,84,103,104,79,74,74,79,76,10,48,48,118,74,107,43,56,106,86, + 67,71,78,68,99,57,52,50,86,54,110,70,88,122,110,68,77,88,119,113,120,104, + 82,67,87,54,100,109,43,50,108,84,104,55,110,116,114,108,105,56,109,67, + 
107,53,103,73,85,67,74,90,75,65,77,65,122,10,107,121,114,50,118,108,50, + 80,101,52,56,97,100,105,56,86,115,57,115,61,10,45,45,45,45,45,69,78,68, + 32,68,83,65,32,80,82,73,86,65,84,69,32,75,69,89,45,45,45,45,45, + ]); + assert(keyInfo.ppk === true, makeMsg(what, 'Expected PPK flag')); + assert(keyInfo._converted === true, + makeMsg(what, 'Expected automatic private PEM generation')); + assert(keyInfo._macresult === true, + makeMsg(what, 'Expected successful MAC verification')); + assert(keyInfo._decrypted === true, + makeMsg(what, 'Expected decrypted flag')); + assert.deepEqual(keyInfo.privateOrig, + expPrivOrig, + makeMsg(what, 'Decrypted private PEM data mismatch')); + next(); + }, + what: 'decryptKey - with encrypted DSA PPK' + }, + { run: function() { + var what = this.what; + var filepath = fixturesdir + '/id_rsa_enc'; + var passphrase = 'foobarbaz'; + var keyInfo = utils.parseKey(fs.readFileSync(filepath)); + + utils.decryptKey(keyInfo, passphrase); + + var expPriv = new Buffer([ + 0x30, 0x82, 0x04, 0xa5, 0x02, 0x01, 0x00, 0x02, 0x82, 0x01, 0x01, 0x00, + 0xec, 0x9f, 0xd7, 0x6e, 0x17, 0xfa, 0xe4, 0xc5, 0xff, 0xac, 0x83, 0x6e, + 0xbe, 0x60, 0x66, 0xb2, 0xf2, 0x6a, 0x34, 0xfa, 0x4f, 0xe2, 0x49, 0xcd, + 0x54, 0x29, 0x34, 0x95, 0x3b, 0x55, 0xc1, 0xf5, 0x1f, 0x16, 0x3f, 0x6a, + 0x9f, 0xe5, 0x93, 0x7c, 0x25, 0xe0, 0x92, 0xdb, 0x63, 0x8d, 0xbb, 0xb4, + 0xc2, 0x24, 0x7c, 0x9c, 0x69, 0x4c, 0xe0, 0xa7, 0x21, 0xac, 0xfc, 0xd3, + 0x44, 0x3b, 0x1a, 0xaf, 0x9e, 0x60, 0x93, 0x09, 0xd3, 0xac, 0xb4, 0x65, + 0x88, 0x39, 0x85, 0x8c, 0xd2, 0x04, 0x2c, 0xaf, 0x85, 0x27, 0x92, 0x59, + 0x1b, 0x28, 0x73, 0x99, 0xf9, 0xc1, 0x6c, 0x37, 0x08, 0xa2, 0x77, 0x58, + 0x5a, 0x8c, 0xb0, 0x96, 0xc0, 0x63, 0x8a, 0x10, 0x10, 0x84, 0xd8, 0xfa, + 0x1e, 0xb7, 0x27, 0x58, 0xc1, 0x6b, 0x34, 0xb4, 0xe6, 0xa1, 0x05, 0x68, + 0x82, 0xc9, 0xe3, 0x6a, 0x6f, 0x0b, 0xb5, 0xf7, 0x13, 0xa8, 0x8b, 0x14, + 0x4a, 0x6e, 0x0a, 0x72, 0x39, 0xa6, 0x1f, 0xa0, 0x4f, 0xcb, 0x72, 0xd9, + 0xe5, 0x61, 0x61, 0xa7, 0x63, 
0x53, 0xae, 0x66, 0xd6, 0xba, 0xb4, 0xda, + 0x98, 0x92, 0xb4, 0x50, 0x93, 0x36, 0xdf, 0x9a, 0xfe, 0x58, 0x36, 0x6d, + 0x31, 0xd7, 0xff, 0x01, 0x88, 0xe4, 0x49, 0x3b, 0x71, 0x8f, 0x09, 0xe6, + 0x6f, 0xc9, 0xe1, 0x98, 0x51, 0x8c, 0xc4, 0xfa, 0x16, 0xb9, 0x45, 0x14, + 0x7b, 0x9e, 0x0e, 0x09, 0x6d, 0x07, 0x1e, 0x79, 0x4e, 0xa1, 0xb6, 0xf0, + 0xdd, 0x0a, 0x3d, 0xa3, 0x9c, 0xf4, 0xeb, 0x5f, 0xaa, 0x29, 0x37, 0x8c, + 0xb4, 0x03, 0x25, 0xac, 0xe8, 0x64, 0xf6, 0x07, 0xbe, 0xca, 0xc8, 0x48, + 0x39, 0x51, 0xaf, 0x36, 0x42, 0xdd, 0x32, 0x7d, 0x37, 0x7a, 0xdd, 0xd6, + 0xbf, 0x57, 0xf9, 0x10, 0xd7, 0x9f, 0xe4, 0xb8, 0xc8, 0xa7, 0x1b, 0x0c, + 0x89, 0x69, 0xf0, 0x9d, 0x02, 0x03, 0x01, 0x00, 0x01, 0x02, 0x82, 0x01, + 0x01, 0x00, 0xcb, 0xad, 0x9a, 0xe6, 0x6d, 0x45, 0xcd, 0x7e, 0x91, 0x61, + 0x91, 0x90, 0xd5, 0xd6, 0x6d, 0x10, 0x43, 0x92, 0x20, 0x20, 0x06, 0x7b, + 0x26, 0x43, 0xd3, 0xd9, 0xd4, 0x25, 0x50, 0x79, 0xb5, 0x06, 0xa5, 0xc7, + 0xa4, 0xb6, 0xc7, 0x03, 0xfa, 0x3b, 0xb6, 0xee, 0xe3, 0xfa, 0x6c, 0x6b, + 0x27, 0xd3, 0xa3, 0xf9, 0x7c, 0x39, 0xb6, 0x7a, 0x32, 0x36, 0x2a, 0xca, + 0x98, 0xa5, 0xd1, 0xe9, 0x7e, 0x43, 0x04, 0xf4, 0xe4, 0x1c, 0x65, 0x54, + 0x17, 0xc3, 0xfd, 0xca, 0x65, 0xa6, 0x9d, 0x70, 0x72, 0x76, 0x73, 0x0b, + 0x68, 0xf1, 0xc2, 0x6a, 0xc3, 0x77, 0x1a, 0x80, 0xe0, 0x01, 0x4f, 0x31, + 0x69, 0xc1, 0x67, 0xd0, 0x0a, 0x80, 0xf9, 0x01, 0xb5, 0x69, 0xb6, 0x8e, + 0x63, 0xa6, 0x88, 0xa1, 0xe7, 0x00, 0x83, 0x1d, 0x20, 0xb5, 0x46, 0x7f, + 0xfc, 0x03, 0xc5, 0xf0, 0xba, 0x0c, 0x77, 0xa1, 0x80, 0xf2, 0x90, 0xcf, + 0x0a, 0x57, 0x14, 0xa0, 0xc7, 0x30, 0x5b, 0x10, 0x2c, 0x78, 0x85, 0xa2, + 0x52, 0x7e, 0xf1, 0xa8, 0xd0, 0xbc, 0x78, 0x3e, 0x7e, 0xf6, 0xc5, 0xc5, + 0xac, 0xf3, 0x02, 0xd4, 0xdc, 0x02, 0x20, 0xb8, 0xcc, 0x37, 0xe1, 0xaa, + 0x2d, 0x24, 0xed, 0x44, 0xfb, 0x77, 0x20, 0xe5, 0xc4, 0x38, 0x63, 0x08, + 0xe2, 0x92, 0xd2, 0x6a, 0xdf, 0x0a, 0xcb, 0x54, 0x97, 0x91, 0xc7, 0x55, + 0x9c, 0x31, 0x00, 0xe2, 0xf2, 0x25, 0xda, 0xd8, 0x6c, 0xea, 0x6d, 0xf2, + 0xff, 0x98, 0x16, 0x17, 0xf5, 
0xfa, 0x5c, 0xd8, 0xfd, 0x8a, 0xf7, 0xea, + 0x35, 0xa0, 0x96, 0xbd, 0xed, 0xd3, 0x35, 0x1c, 0xee, 0x78, 0xa3, 0x1f, + 0xfd, 0x0c, 0x24, 0x4b, 0xad, 0x4f, 0xa5, 0x08, 0xb3, 0xd7, 0x90, 0xe7, + 0x08, 0x60, 0x52, 0x26, 0xee, 0x93, 0x40, 0x80, 0xd1, 0xaf, 0xaf, 0x74, + 0x4d, 0x3b, 0x6f, 0x4e, 0x42, 0x91, 0x02, 0x81, 0x81, 0x00, 0xfe, 0x26, + 0x9d, 0x81, 0xa1, 0xf3, 0xe2, 0x48, 0xad, 0x22, 0x4d, 0xf4, 0x1c, 0x68, + 0x4d, 0xe1, 0xe2, 0xff, 0x09, 0xbf, 0xa6, 0x12, 0x0a, 0x16, 0xce, 0xd6, + 0x34, 0x73, 0x7d, 0x55, 0xd3, 0x35, 0x6f, 0xb2, 0x4d, 0x64, 0xcb, 0x5e, + 0x5e, 0x1a, 0xc8, 0xcf, 0x29, 0x58, 0x28, 0x30, 0xec, 0x29, 0x95, 0x09, + 0x1b, 0x11, 0x80, 0xba, 0xe0, 0x16, 0x99, 0x49, 0x76, 0xb3, 0x42, 0x72, + 0x05, 0x10, 0xf2, 0xf5, 0xa8, 0x36, 0x84, 0xf1, 0x83, 0x57, 0x5d, 0x27, + 0xae, 0xe9, 0xae, 0x13, 0x47, 0xf3, 0xda, 0xb1, 0x42, 0xde, 0xa1, 0x25, + 0xc7, 0x72, 0xee, 0x2e, 0x34, 0x78, 0x64, 0x6a, 0xed, 0x91, 0x81, 0xaf, + 0x1e, 0xd3, 0xed, 0x68, 0x91, 0x37, 0x2f, 0xdd, 0x57, 0x5d, 0x2a, 0x3a, + 0x21, 0x86, 0x74, 0x6d, 0xff, 0x0b, 0x56, 0xac, 0xc2, 0x65, 0x3a, 0x88, + 0x91, 0x5e, 0x3c, 0x10, 0x7e, 0x53, 0x02, 0x81, 0x81, 0x00, 0xee, 0x58, + 0x94, 0xcc, 0x96, 0x1b, 0x9a, 0x63, 0x84, 0x45, 0x7c, 0x92, 0x78, 0x35, + 0x17, 0x7e, 0x7f, 0x7d, 0x6f, 0x06, 0x77, 0x69, 0x5c, 0xc4, 0xe8, 0xc7, + 0x19, 0xd2, 0x5e, 0x58, 0xd2, 0x1d, 0xee, 0x4c, 0xf2, 0xd9, 0xcb, 0xca, + 0x2a, 0x27, 0xec, 0x5b, 0x55, 0x37, 0x66, 0x0d, 0x2c, 0xe6, 0xfd, 0x48, + 0x35, 0x51, 0x66, 0x13, 0x1e, 0xab, 0x70, 0xda, 0xe6, 0x45, 0xac, 0x25, + 0x8b, 0x2b, 0x89, 0x4b, 0x19, 0x99, 0x6a, 0x06, 0x81, 0x24, 0xd5, 0xa9, + 0x3c, 0xf6, 0xc4, 0x28, 0x39, 0x70, 0x24, 0x0d, 0x8d, 0xcd, 0x69, 0xb4, + 0x65, 0x78, 0x0d, 0xaf, 0x4f, 0x68, 0xe3, 0xac, 0x9d, 0xaf, 0x07, 0x93, + 0x10, 0xeb, 0xc9, 0x40, 0x8c, 0x82, 0x85, 0x85, 0x16, 0xc0, 0xfc, 0x49, + 0x76, 0x03, 0x63, 0x7a, 0x2c, 0x95, 0x61, 0xc8, 0x3b, 0x07, 0x79, 0x68, + 0x60, 0xb1, 0xc8, 0xf0, 0x97, 0x4f, 0x02, 0x81, 0x81, 0x00, 0xe4, 0xa3, + 0x3c, 0xa3, 0x38, 0x5d, 0x3d, 
0x3f, 0x00, 0x72, 0x92, 0x0a, 0x7f, 0xdb, + 0xdd, 0xe4, 0xce, 0xdf, 0x7d, 0x97, 0xaa, 0x01, 0x24, 0x8e, 0x6c, 0x39, + 0x0c, 0x2a, 0xb1, 0xa0, 0x9a, 0x47, 0xc2, 0x5a, 0x77, 0x81, 0xab, 0xeb, + 0x13, 0x61, 0xa9, 0x31, 0xa5, 0x12, 0x27, 0xe6, 0x0b, 0x2f, 0x45, 0x62, + 0x51, 0xb5, 0xa7, 0x47, 0x76, 0xfd, 0x1d, 0x9d, 0x97, 0x69, 0xa0, 0xe7, + 0x0e, 0x63, 0xb7, 0x0f, 0x04, 0xeb, 0x37, 0x22, 0x46, 0x74, 0x3b, 0xdb, + 0xcd, 0x61, 0x70, 0x36, 0xec, 0x4e, 0x16, 0x79, 0xcd, 0x9c, 0x97, 0x00, + 0x73, 0xb3, 0x93, 0x4e, 0x81, 0xe9, 0xa4, 0xfd, 0x05, 0x08, 0x17, 0xd0, + 0xc1, 0x3d, 0x0a, 0xa1, 0x3d, 0xb2, 0x96, 0x1e, 0xdb, 0xcf, 0x76, 0x83, + 0xa1, 0x51, 0x62, 0x40, 0xea, 0x66, 0xfa, 0xec, 0xa4, 0x5d, 0x89, 0x10, + 0xff, 0x25, 0xf5, 0x87, 0x28, 0xfd, 0x02, 0x81, 0x80, 0x0e, 0xb4, 0x22, + 0x41, 0xd8, 0xc4, 0xcd, 0x2a, 0x74, 0x7f, 0x80, 0xe6, 0xdc, 0x49, 0x92, + 0x30, 0x78, 0x96, 0xf9, 0x61, 0x71, 0xbe, 0x6b, 0x3d, 0xae, 0x8a, 0x91, + 0xda, 0x3b, 0x7d, 0xc9, 0x40, 0x95, 0x71, 0xe3, 0xcd, 0x71, 0xd7, 0xff, + 0xef, 0xc4, 0x92, 0x01, 0xd8, 0xd0, 0x0f, 0xe2, 0x04, 0x41, 0xfd, 0xd1, + 0x64, 0x3b, 0x22, 0xd4, 0xd2, 0x88, 0xbc, 0xc8, 0x55, 0xe5, 0xff, 0xce, + 0xed, 0x19, 0xa4, 0x2b, 0x69, 0x1e, 0x74, 0x56, 0x45, 0x3a, 0x75, 0x1d, + 0x50, 0xaf, 0xdb, 0x37, 0x67, 0xe6, 0xa1, 0x7b, 0x6c, 0xff, 0xa7, 0x64, + 0x57, 0x1a, 0xa9, 0x05, 0x02, 0x18, 0x81, 0x8e, 0x9c, 0xbc, 0x9f, 0xe2, + 0xfc, 0x58, 0xc7, 0x05, 0xa4, 0x0b, 0xae, 0xa1, 0x2f, 0xb8, 0xa0, 0xa3, + 0x8a, 0x23, 0xf9, 0xe6, 0x84, 0x34, 0xab, 0x10, 0x91, 0x2e, 0x79, 0x34, + 0xf5, 0xe2, 0xca, 0x8c, 0xdb, 0x02, 0x81, 0x81, 0x00, 0x8e, 0x9e, 0xc9, + 0xc2, 0xdf, 0xf7, 0x36, 0xef, 0x6e, 0x2a, 0x36, 0xeb, 0xd7, 0xa4, 0x52, + 0x43, 0x27, 0x8d, 0xaa, 0x52, 0x2d, 0xa8, 0xc1, 0x66, 0xf4, 0x9f, 0xc6, + 0x78, 0x9e, 0x31, 0x64, 0xe6, 0x56, 0xc9, 0x6d, 0x85, 0x79, 0x2a, 0x5c, + 0xca, 0x53, 0x2b, 0x1a, 0x46, 0xf4, 0x16, 0x60, 0xfe, 0x41, 0xcf, 0xc7, + 0x74, 0x57, 0xd0, 0x06, 0xf4, 0xc9, 0xfa, 0x47, 0x9c, 0xa2, 0xcb, 0xe0, + 0x85, 0xc5, 0x95, 0x9f, 0x35, 
0xdd, 0x4c, 0x15, 0x7a, 0xda, 0x34, 0xc4, + 0x81, 0x20, 0x7d, 0x55, 0x85, 0xee, 0x24, 0xa7, 0xa6, 0xcb, 0x0a, 0xec, + 0xa8, 0x13, 0x4a, 0xc4, 0xaa, 0x5a, 0x4c, 0xf9, 0x32, 0xc0, 0x4b, 0x65, + 0x47, 0x65, 0xba, 0x38, 0x57, 0x17, 0x0c, 0xdd, 0xe1, 0x68, 0xd1, 0x4f, + 0x3d, 0xb9, 0x0e, 0xdd, 0x3f, 0x53, 0xe6, 0x91, 0x0e, 0x33, 0xba, 0x77, + 0xc2, 0x03, 0xf5, 0x90, 0x60, 0x07, 0x07, 0x07, 0x07, 0x07, 0x07, 0x07, + ]); + var expPrivOrig = [ + '-----BEGIN RSA PRIVATE KEY-----', + 'MIIEpQIBAAKCAQEA7J/Xbhf65MX/rINuvmBmsvJqNPpP4knNVCk0lTtVwfUfFj9qn+WTfC', + 'Xgkttjjbu0wiR8nGlM4KchrPzTRDsar55gkwnTrLRliDmFjNIELK+FJ5JZGyhzmfnBbDcI', + 'ondYWoywlsBjihAQhNj6HrcnWMFrNLTmoQVogsnjam8LtfcTqIsUSm4KcjmmH6BPy3LZ5W', + 'Fhp2NTrmbWurTamJK0UJM235r+WDZtMdf/AYjkSTtxjwnmb8nhmFGMxPoWuUUUe54OCW0H', + 'HnlOobbw3Qo9o5z061+qKTeMtAMlrOhk9ge+yshIOVGvNkLdMn03et3Wv1f5ENef5LjIpx', + 'sMiWnwnQIDAQABAoIBAQDLrZrmbUXNfpFhkZDV1m0QQ5IgIAZ7JkPT2dQlUHm1BqXHpLbH', + 'A/o7tu7j+mxrJ9Oj+Xw5tnoyNirKmKXR6X5DBPTkHGVUF8P9ymWmnXBydnMLaPHCasN3Go', + 'DgAU8xacFn0AqA+QG1abaOY6aIoecAgx0gtUZ//APF8LoMd6GA8pDPClcUoMcwWxAseIWi', + 'Un7xqNC8eD5+9sXFrPMC1NwCILjMN+GqLSTtRPt3IOXEOGMI4pLSat8Ky1SXkcdVnDEA4v', + 'Il2ths6m3y/5gWF/X6XNj9ivfqNaCWve3TNRzueKMf/QwkS61PpQiz15DnCGBSJu6TQIDR', + 'r690TTtvTkKRAoGBAP4mnYGh8+JIrSJN9BxoTeHi/wm/phIKFs7WNHN9VdM1b7JNZMteXh', + 'rIzylYKDDsKZUJGxGAuuAWmUl2s0JyBRDy9ag2hPGDV10nrumuE0fz2rFC3qElx3LuLjR4', + 'ZGrtkYGvHtPtaJE3L91XXSo6IYZ0bf8LVqzCZTqIkV48EH5TAoGBAO5YlMyWG5pjhEV8kn', + 'g1F35/fW8Gd2lcxOjHGdJeWNId7kzy2cvKKifsW1U3Zg0s5v1INVFmEx6rcNrmRawliyuJ', + 'SxmZagaBJNWpPPbEKDlwJA2NzWm0ZXgNr09o46ydrweTEOvJQIyChYUWwPxJdgNjeiyVYc', + 'g7B3loYLHI8JdPAoGBAOSjPKM4XT0/AHKSCn/b3eTO332XqgEkjmw5DCqxoJpHwlp3gavr', + 'E2GpMaUSJ+YLL0ViUbWnR3b9HZ2XaaDnDmO3DwTrNyJGdDvbzWFwNuxOFnnNnJcAc7OTTo', + 'HppP0FCBfQwT0KoT2ylh7bz3aDoVFiQOpm+uykXYkQ/yX1hyj9AoGADrQiQdjEzSp0f4Dm', + '3EmSMHiW+WFxvms9roqR2jt9yUCVcePNcdf/78SSAdjQD+IEQf3RZDsi1NKIvMhV5f/O7R', + 'mkK2kedFZFOnUdUK/bN2fmoXts/6dkVxqpBQIYgY6cvJ/i/FjHBaQLrqEvuKCjiiP55oQ0', + 
'qxCRLnk09eLKjNsCgYEAjp7Jwt/3Nu9uKjbr16RSQyeNqlItqMFm9J/GeJ4xZOZWyW2FeS', + 'pcylMrGkb0FmD+Qc/HdFfQBvTJ+kecosvghcWVnzXdTBV62jTEgSB9VYXuJKemywrsqBNK', + 'xKpaTPkywEtlR2W6OFcXDN3haNFPPbkO3T9T5pEOM7p3wgP1kGAHBwcHBwcH', + '-----END RSA PRIVATE KEY-----' + ].join('\n'); + assert(keyInfo.ppk === undefined, makeMsg(what, 'Unexpected PPK flag')); + assert(keyInfo._converted === undefined, + makeMsg(what, 'Unexpected automatic private PEM generation')); + assert(keyInfo._macresult === undefined, + makeMsg(what, 'Unexpected MAC verification')); + assert(keyInfo._decrypted === true, + makeMsg(what, 'Expected decrypted flag')); + assert.deepEqual(keyInfo.private, + expPriv, + makeMsg(what, 'Decrypted private data mismatch')); + assert.deepEqual(keyInfo.privateOrig, + expPrivOrig, + makeMsg(what, 'Decrypted private PEM data mismatch')); + next(); + }, + what: 'decryptKey - with encrypted RSA' + }, + { run: function() { + var pubkey = [ + '---- BEGIN SSH2 PUBLIC KEY ----', + 'Comment: "dsa-key-20151028"', + 'AAAAB3NzaC1kc3MAAAEBAJ0Gth9JHw/a8RmY3Y0UFqBWVWkzWxkzG+DR2oqHwTIq', + 'jAi9Xr06oSbdmXd3Jl3bHsbd2gVq4+/j32s0uIf6FEW7mFooSiDOcRWARJSAAmkI', + 'T9ep//ag+gNUmwhtPebliCqcAn9VWE7wq2v0FJsG3trFW/pvi/hVsBOrkz4Qieqb', + 'KbwNwZc/MI+h9KAQPV7tWN2y5aG3jlVD9PERyeFPlmYkD1P+IqytxvL3thUFKeru', + 'N8w+hjKIGGonEcVzWRJ9UUBQqAaNNH4/9mzedpS8CivfnUvsIw9rSTB4N+Wf70jb', + '6a2qD1mHs1DqOkX136UOn5HkFldBnny0NSmlR/LewQkAAAAVAMu94kPPMR0Ew6Zh', + 'mAoJJc0RjisBAAABAGkOU/b/I0opGITGCx9qFEcqiJ/VJHVsYhQgM/jCkydEc9kW', + 'yjY+wKulGWpmA8wGmYm9j4IAgMGEXjBR8dyYJNZVXK0JOJmWrqUj5Q1GCUS5hCyU', + 'iA7nmVQ4syhGE49aFBLFdyKS6t7//swEEEV+6Hw9qcQWB98zoD8qdPGz3W/9kNXB', + 'OgVHWyqfWsbA/7MW2vjjF/u2EJe8YRKIJnodLOSNwPf0iCmj1HdaIm5N2Nl1k/6/', + '9MwlY6tjn4hinrEN/pOiC1ci/1ADmTq4L9upi1Paix51zD8Yp7q3SxOgZqFU0ELF', + 'VP/XHokm278t1mE9hxDwkepv7XgBda8uamWzwSoAAAEAYl2bjiCjIB68+DNuRgtf', + 'lvVk00nOH3dYXSslwKIFTivYDczjz0splaLsEhrdTRiOXyVsCEDhYtlvWlTw34rg', + 's2QoutpqISOiq26XwPdOlejD7Hy7gtw3yRyrhbXHYHE0nOvx0/SP7il4ub//QRTd', + 
'7cUPao2f359cGpap84anqKJjF3m4oRGdZGhTAQPqtGMkchZvItKyZe6pJ9HhsE7h', + 'NMsxPHAUon8QwNL1v+JkHg7i+Oe8rEZx/51m/qGVtXLN+z885lsqzuwe9KhY5I8C', + 'C3f8nR+Mivfp1ce9pSMKCpdRASzOBuykZKYZmns6SA0UqAp7ZLDKubbhk9ZLVyAO', + 'dA==', + '---- END SSH2 PUBLIC KEY ----' + ].join('\n'); + assert.doesNotThrow(function() { + var res = utils.genPublicKey(utils.parseKey(pubkey)); + assert.deepEqual( + res, + { type: 'dss', + fulltype: 'ssh-dss', + curve: undefined, + public: new Buffer([ + 0x00, 0x00, 0x00, 0x07, 0x73, 0x73, 0x68, 0x2d, 0x64, 0x73, 0x73, + 0x00, 0x00, 0x01, 0x01, 0x00, 0x9d, 0x06, 0xb6, 0x1f, 0x49, 0x1f, + 0x0f, 0xda, 0xf1, 0x19, 0x98, 0xdd, 0x8d, 0x14, 0x16, 0xa0, 0x56, + 0x55, 0x69, 0x33, 0x5b, 0x19, 0x33, 0x1b, 0xe0, 0xd1, 0xda, 0x8a, + 0x87, 0xc1, 0x32, 0x2a, 0x8c, 0x08, 0xbd, 0x5e, 0xbd, 0x3a, 0xa1, + 0x26, 0xdd, 0x99, 0x77, 0x77, 0x26, 0x5d, 0xdb, 0x1e, 0xc6, 0xdd, + 0xda, 0x05, 0x6a, 0xe3, 0xef, 0xe3, 0xdf, 0x6b, 0x34, 0xb8, 0x87, + 0xfa, 0x14, 0x45, 0xbb, 0x98, 0x5a, 0x28, 0x4a, 0x20, 0xce, 0x71, + 0x15, 0x80, 0x44, 0x94, 0x80, 0x02, 0x69, 0x08, 0x4f, 0xd7, 0xa9, + 0xff, 0xf6, 0xa0, 0xfa, 0x03, 0x54, 0x9b, 0x08, 0x6d, 0x3d, 0xe6, + 0xe5, 0x88, 0x2a, 0x9c, 0x02, 0x7f, 0x55, 0x58, 0x4e, 0xf0, 0xab, + 0x6b, 0xf4, 0x14, 0x9b, 0x06, 0xde, 0xda, 0xc5, 0x5b, 0xfa, 0x6f, + 0x8b, 0xf8, 0x55, 0xb0, 0x13, 0xab, 0x93, 0x3e, 0x10, 0x89, 0xea, + 0x9b, 0x29, 0xbc, 0x0d, 0xc1, 0x97, 0x3f, 0x30, 0x8f, 0xa1, 0xf4, + 0xa0, 0x10, 0x3d, 0x5e, 0xed, 0x58, 0xdd, 0xb2, 0xe5, 0xa1, 0xb7, + 0x8e, 0x55, 0x43, 0xf4, 0xf1, 0x11, 0xc9, 0xe1, 0x4f, 0x96, 0x66, + 0x24, 0x0f, 0x53, 0xfe, 0x22, 0xac, 0xad, 0xc6, 0xf2, 0xf7, 0xb6, + 0x15, 0x05, 0x29, 0xea, 0xee, 0x37, 0xcc, 0x3e, 0x86, 0x32, 0x88, + 0x18, 0x6a, 0x27, 0x11, 0xc5, 0x73, 0x59, 0x12, 0x7d, 0x51, 0x40, + 0x50, 0xa8, 0x06, 0x8d, 0x34, 0x7e, 0x3f, 0xf6, 0x6c, 0xde, 0x76, + 0x94, 0xbc, 0x0a, 0x2b, 0xdf, 0x9d, 0x4b, 0xec, 0x23, 0x0f, 0x6b, + 0x49, 0x30, 0x78, 0x37, 0xe5, 0x9f, 0xef, 0x48, 0xdb, 0xe9, 0xad, + 0xaa, 0x0f, 0x59, 0x87, 0xb3, 
0x50, 0xea, 0x3a, 0x45, 0xf5, 0xdf, + 0xa5, 0x0e, 0x9f, 0x91, 0xe4, 0x16, 0x57, 0x41, 0x9e, 0x7c, 0xb4, + 0x35, 0x29, 0xa5, 0x47, 0xf2, 0xde, 0xc1, 0x09, 0x00, 0x00, 0x00, + 0x15, 0x00, 0xcb, 0xbd, 0xe2, 0x43, 0xcf, 0x31, 0x1d, 0x04, 0xc3, + 0xa6, 0x61, 0x98, 0x0a, 0x09, 0x25, 0xcd, 0x11, 0x8e, 0x2b, 0x01, + 0x00, 0x00, 0x01, 0x00, 0x69, 0x0e, 0x53, 0xf6, 0xff, 0x23, 0x4a, + 0x29, 0x18, 0x84, 0xc6, 0x0b, 0x1f, 0x6a, 0x14, 0x47, 0x2a, 0x88, + 0x9f, 0xd5, 0x24, 0x75, 0x6c, 0x62, 0x14, 0x20, 0x33, 0xf8, 0xc2, + 0x93, 0x27, 0x44, 0x73, 0xd9, 0x16, 0xca, 0x36, 0x3e, 0xc0, 0xab, + 0xa5, 0x19, 0x6a, 0x66, 0x03, 0xcc, 0x06, 0x99, 0x89, 0xbd, 0x8f, + 0x82, 0x00, 0x80, 0xc1, 0x84, 0x5e, 0x30, 0x51, 0xf1, 0xdc, 0x98, + 0x24, 0xd6, 0x55, 0x5c, 0xad, 0x09, 0x38, 0x99, 0x96, 0xae, 0xa5, + 0x23, 0xe5, 0x0d, 0x46, 0x09, 0x44, 0xb9, 0x84, 0x2c, 0x94, 0x88, + 0x0e, 0xe7, 0x99, 0x54, 0x38, 0xb3, 0x28, 0x46, 0x13, 0x8f, 0x5a, + 0x14, 0x12, 0xc5, 0x77, 0x22, 0x92, 0xea, 0xde, 0xff, 0xfe, 0xcc, + 0x04, 0x10, 0x45, 0x7e, 0xe8, 0x7c, 0x3d, 0xa9, 0xc4, 0x16, 0x07, + 0xdf, 0x33, 0xa0, 0x3f, 0x2a, 0x74, 0xf1, 0xb3, 0xdd, 0x6f, 0xfd, + 0x90, 0xd5, 0xc1, 0x3a, 0x05, 0x47, 0x5b, 0x2a, 0x9f, 0x5a, 0xc6, + 0xc0, 0xff, 0xb3, 0x16, 0xda, 0xf8, 0xe3, 0x17, 0xfb, 0xb6, 0x10, + 0x97, 0xbc, 0x61, 0x12, 0x88, 0x26, 0x7a, 0x1d, 0x2c, 0xe4, 0x8d, + 0xc0, 0xf7, 0xf4, 0x88, 0x29, 0xa3, 0xd4, 0x77, 0x5a, 0x22, 0x6e, + 0x4d, 0xd8, 0xd9, 0x75, 0x93, 0xfe, 0xbf, 0xf4, 0xcc, 0x25, 0x63, + 0xab, 0x63, 0x9f, 0x88, 0x62, 0x9e, 0xb1, 0x0d, 0xfe, 0x93, 0xa2, + 0x0b, 0x57, 0x22, 0xff, 0x50, 0x03, 0x99, 0x3a, 0xb8, 0x2f, 0xdb, + 0xa9, 0x8b, 0x53, 0xda, 0x8b, 0x1e, 0x75, 0xcc, 0x3f, 0x18, 0xa7, + 0xba, 0xb7, 0x4b, 0x13, 0xa0, 0x66, 0xa1, 0x54, 0xd0, 0x42, 0xc5, + 0x54, 0xff, 0xd7, 0x1e, 0x89, 0x26, 0xdb, 0xbf, 0x2d, 0xd6, 0x61, + 0x3d, 0x87, 0x10, 0xf0, 0x91, 0xea, 0x6f, 0xed, 0x78, 0x01, 0x75, + 0xaf, 0x2e, 0x6a, 0x65, 0xb3, 0xc1, 0x2a, 0x00, 0x00, 0x01, 0x00, + 0x62, 0x5d, 0x9b, 0x8e, 0x20, 0xa3, 0x20, 0x1e, 0xbc, 
0xf8, 0x33, + 0x6e, 0x46, 0x0b, 0x5f, 0x96, 0xf5, 0x64, 0xd3, 0x49, 0xce, 0x1f, + 0x77, 0x58, 0x5d, 0x2b, 0x25, 0xc0, 0xa2, 0x05, 0x4e, 0x2b, 0xd8, + 0x0d, 0xcc, 0xe3, 0xcf, 0x4b, 0x29, 0x95, 0xa2, 0xec, 0x12, 0x1a, + 0xdd, 0x4d, 0x18, 0x8e, 0x5f, 0x25, 0x6c, 0x08, 0x40, 0xe1, 0x62, + 0xd9, 0x6f, 0x5a, 0x54, 0xf0, 0xdf, 0x8a, 0xe0, 0xb3, 0x64, 0x28, + 0xba, 0xda, 0x6a, 0x21, 0x23, 0xa2, 0xab, 0x6e, 0x97, 0xc0, 0xf7, + 0x4e, 0x95, 0xe8, 0xc3, 0xec, 0x7c, 0xbb, 0x82, 0xdc, 0x37, 0xc9, + 0x1c, 0xab, 0x85, 0xb5, 0xc7, 0x60, 0x71, 0x34, 0x9c, 0xeb, 0xf1, + 0xd3, 0xf4, 0x8f, 0xee, 0x29, 0x78, 0xb9, 0xbf, 0xff, 0x41, 0x14, + 0xdd, 0xed, 0xc5, 0x0f, 0x6a, 0x8d, 0x9f, 0xdf, 0x9f, 0x5c, 0x1a, + 0x96, 0xa9, 0xf3, 0x86, 0xa7, 0xa8, 0xa2, 0x63, 0x17, 0x79, 0xb8, + 0xa1, 0x11, 0x9d, 0x64, 0x68, 0x53, 0x01, 0x03, 0xea, 0xb4, 0x63, + 0x24, 0x72, 0x16, 0x6f, 0x22, 0xd2, 0xb2, 0x65, 0xee, 0xa9, 0x27, + 0xd1, 0xe1, 0xb0, 0x4e, 0xe1, 0x34, 0xcb, 0x31, 0x3c, 0x70, 0x14, + 0xa2, 0x7f, 0x10, 0xc0, 0xd2, 0xf5, 0xbf, 0xe2, 0x64, 0x1e, 0x0e, + 0xe2, 0xf8, 0xe7, 0xbc, 0xac, 0x46, 0x71, 0xff, 0x9d, 0x66, 0xfe, + 0xa1, 0x95, 0xb5, 0x72, 0xcd, 0xfb, 0x3f, 0x3c, 0xe6, 0x5b, 0x2a, + 0xce, 0xec, 0x1e, 0xf4, 0xa8, 0x58, 0xe4, 0x8f, 0x02, 0x0b, 0x77, + 0xfc, 0x9d, 0x1f, 0x8c, 0x8a, 0xf7, 0xe9, 0xd5, 0xc7, 0xbd, 0xa5, + 0x23, 0x0a, 0x0a, 0x97, 0x51, 0x01, 0x2c, 0xce, 0x06, 0xec, 0xa4, + 0x64, 0xa6, 0x19, 0x9a, 0x7b, 0x3a, 0x48, 0x0d, 0x14, 0xa8, 0x0a, + 0x7b, 0x64, 0xb0, 0xca, 0xb9, 0xb6, 0xe1, 0x93, 0xd6, 0x4b, 0x57, + 0x20, 0x0e, 0x74, + ]), + publicOrig: new Buffer([ + 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, + 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, + 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x44, 0x4f, 0x6a, + 0x43, 0x43, 0x41, 0x69, 0x30, 0x47, 0x42, 0x79, 0x71, 0x47, 0x53, + 0x4d, 0x34, 0x34, 0x42, 0x41, 0x45, 0x77, 0x67, 0x67, 0x49, 0x67, + 0x41, 0x6f, 0x49, 0x42, 0x41, 0x51, 0x43, 0x64, 0x42, 0x72, 0x59, + 0x66, 0x53, 0x52, 0x38, 
0x50, 0x32, 0x76, 0x45, 0x5a, 0x6d, 0x4e, + 0x32, 0x4e, 0x46, 0x42, 0x61, 0x67, 0x56, 0x6c, 0x56, 0x70, 0x4d, + 0x31, 0x73, 0x5a, 0x0a, 0x4d, 0x78, 0x76, 0x67, 0x30, 0x64, 0x71, + 0x4b, 0x68, 0x38, 0x45, 0x79, 0x4b, 0x6f, 0x77, 0x49, 0x76, 0x56, + 0x36, 0x39, 0x4f, 0x71, 0x45, 0x6d, 0x33, 0x5a, 0x6c, 0x33, 0x64, + 0x79, 0x5a, 0x64, 0x32, 0x78, 0x37, 0x47, 0x33, 0x64, 0x6f, 0x46, + 0x61, 0x75, 0x50, 0x76, 0x34, 0x39, 0x39, 0x72, 0x4e, 0x4c, 0x69, + 0x48, 0x2b, 0x68, 0x52, 0x46, 0x75, 0x35, 0x68, 0x61, 0x4b, 0x45, + 0x6f, 0x67, 0x0a, 0x7a, 0x6e, 0x45, 0x56, 0x67, 0x45, 0x53, 0x55, + 0x67, 0x41, 0x4a, 0x70, 0x43, 0x45, 0x2f, 0x58, 0x71, 0x66, 0x2f, + 0x32, 0x6f, 0x50, 0x6f, 0x44, 0x56, 0x4a, 0x73, 0x49, 0x62, 0x54, + 0x33, 0x6d, 0x35, 0x59, 0x67, 0x71, 0x6e, 0x41, 0x4a, 0x2f, 0x56, + 0x56, 0x68, 0x4f, 0x38, 0x4b, 0x74, 0x72, 0x39, 0x42, 0x53, 0x62, + 0x42, 0x74, 0x37, 0x61, 0x78, 0x56, 0x76, 0x36, 0x62, 0x34, 0x76, + 0x34, 0x0a, 0x56, 0x62, 0x41, 0x54, 0x71, 0x35, 0x4d, 0x2b, 0x45, + 0x49, 0x6e, 0x71, 0x6d, 0x79, 0x6d, 0x38, 0x44, 0x63, 0x47, 0x58, + 0x50, 0x7a, 0x43, 0x50, 0x6f, 0x66, 0x53, 0x67, 0x45, 0x44, 0x31, + 0x65, 0x37, 0x56, 0x6a, 0x64, 0x73, 0x75, 0x57, 0x68, 0x74, 0x34, + 0x35, 0x56, 0x51, 0x2f, 0x54, 0x78, 0x45, 0x63, 0x6e, 0x68, 0x54, + 0x35, 0x5a, 0x6d, 0x4a, 0x41, 0x39, 0x54, 0x2f, 0x69, 0x4b, 0x73, + 0x0a, 0x72, 0x63, 0x62, 0x79, 0x39, 0x37, 0x59, 0x56, 0x42, 0x53, + 0x6e, 0x71, 0x37, 0x6a, 0x66, 0x4d, 0x50, 0x6f, 0x59, 0x79, 0x69, + 0x42, 0x68, 0x71, 0x4a, 0x78, 0x48, 0x46, 0x63, 0x31, 0x6b, 0x53, + 0x66, 0x56, 0x46, 0x41, 0x55, 0x4b, 0x67, 0x47, 0x6a, 0x54, 0x52, + 0x2b, 0x50, 0x2f, 0x5a, 0x73, 0x33, 0x6e, 0x61, 0x55, 0x76, 0x41, + 0x6f, 0x72, 0x33, 0x35, 0x31, 0x4c, 0x37, 0x43, 0x4d, 0x50, 0x0a, + 0x61, 0x30, 0x6b, 0x77, 0x65, 0x44, 0x66, 0x6c, 0x6e, 0x2b, 0x39, + 0x49, 0x32, 0x2b, 0x6d, 0x74, 0x71, 0x67, 0x39, 0x5a, 0x68, 0x37, + 0x4e, 0x51, 0x36, 0x6a, 0x70, 0x46, 0x39, 0x64, 0x2b, 0x6c, 0x44, + 0x70, 0x2b, 0x52, 0x35, 0x42, 0x5a, 0x58, 0x51, 
0x5a, 0x35, 0x38, + 0x74, 0x44, 0x55, 0x70, 0x70, 0x55, 0x66, 0x79, 0x33, 0x73, 0x45, + 0x4a, 0x41, 0x68, 0x55, 0x41, 0x79, 0x37, 0x33, 0x69, 0x0a, 0x51, + 0x38, 0x38, 0x78, 0x48, 0x51, 0x54, 0x44, 0x70, 0x6d, 0x47, 0x59, + 0x43, 0x67, 0x6b, 0x6c, 0x7a, 0x52, 0x47, 0x4f, 0x4b, 0x77, 0x45, + 0x43, 0x67, 0x67, 0x45, 0x41, 0x61, 0x51, 0x35, 0x54, 0x39, 0x76, + 0x38, 0x6a, 0x53, 0x69, 0x6b, 0x59, 0x68, 0x4d, 0x59, 0x4c, 0x48, + 0x32, 0x6f, 0x55, 0x52, 0x79, 0x71, 0x49, 0x6e, 0x39, 0x55, 0x6b, + 0x64, 0x57, 0x78, 0x69, 0x46, 0x43, 0x41, 0x7a, 0x0a, 0x2b, 0x4d, + 0x4b, 0x54, 0x4a, 0x30, 0x52, 0x7a, 0x32, 0x52, 0x62, 0x4b, 0x4e, + 0x6a, 0x37, 0x41, 0x71, 0x36, 0x55, 0x5a, 0x61, 0x6d, 0x59, 0x44, + 0x7a, 0x41, 0x61, 0x5a, 0x69, 0x62, 0x32, 0x50, 0x67, 0x67, 0x43, + 0x41, 0x77, 0x59, 0x52, 0x65, 0x4d, 0x46, 0x48, 0x78, 0x33, 0x4a, + 0x67, 0x6b, 0x31, 0x6c, 0x56, 0x63, 0x72, 0x51, 0x6b, 0x34, 0x6d, + 0x5a, 0x61, 0x75, 0x70, 0x53, 0x50, 0x6c, 0x0a, 0x44, 0x55, 0x59, + 0x4a, 0x52, 0x4c, 0x6d, 0x45, 0x4c, 0x4a, 0x53, 0x49, 0x44, 0x75, + 0x65, 0x5a, 0x56, 0x44, 0x69, 0x7a, 0x4b, 0x45, 0x59, 0x54, 0x6a, + 0x31, 0x6f, 0x55, 0x45, 0x73, 0x56, 0x33, 0x49, 0x70, 0x4c, 0x71, + 0x33, 0x76, 0x2f, 0x2b, 0x7a, 0x41, 0x51, 0x51, 0x52, 0x58, 0x37, + 0x6f, 0x66, 0x44, 0x32, 0x70, 0x78, 0x42, 0x59, 0x48, 0x33, 0x7a, + 0x4f, 0x67, 0x50, 0x79, 0x70, 0x30, 0x0a, 0x38, 0x62, 0x50, 0x64, + 0x62, 0x2f, 0x32, 0x51, 0x31, 0x63, 0x45, 0x36, 0x42, 0x55, 0x64, + 0x62, 0x4b, 0x70, 0x39, 0x61, 0x78, 0x73, 0x44, 0x2f, 0x73, 0x78, + 0x62, 0x61, 0x2b, 0x4f, 0x4d, 0x58, 0x2b, 0x37, 0x59, 0x51, 0x6c, + 0x37, 0x78, 0x68, 0x45, 0x6f, 0x67, 0x6d, 0x65, 0x68, 0x30, 0x73, + 0x35, 0x49, 0x33, 0x41, 0x39, 0x2f, 0x53, 0x49, 0x4b, 0x61, 0x50, + 0x55, 0x64, 0x31, 0x6f, 0x69, 0x0a, 0x62, 0x6b, 0x33, 0x59, 0x32, + 0x58, 0x57, 0x54, 0x2f, 0x72, 0x2f, 0x30, 0x7a, 0x43, 0x56, 0x6a, + 0x71, 0x32, 0x4f, 0x66, 0x69, 0x47, 0x4b, 0x65, 0x73, 0x51, 0x33, + 0x2b, 0x6b, 0x36, 0x49, 0x4c, 0x56, 0x79, 0x4c, 0x2f, 0x55, 0x41, + 0x4f, 
0x5a, 0x4f, 0x72, 0x67, 0x76, 0x32, 0x36, 0x6d, 0x4c, 0x55, + 0x39, 0x71, 0x4c, 0x48, 0x6e, 0x58, 0x4d, 0x50, 0x78, 0x69, 0x6e, + 0x75, 0x72, 0x64, 0x4c, 0x0a, 0x45, 0x36, 0x42, 0x6d, 0x6f, 0x56, + 0x54, 0x51, 0x51, 0x73, 0x56, 0x55, 0x2f, 0x39, 0x63, 0x65, 0x69, + 0x53, 0x62, 0x62, 0x76, 0x79, 0x33, 0x57, 0x59, 0x54, 0x32, 0x48, + 0x45, 0x50, 0x43, 0x52, 0x36, 0x6d, 0x2f, 0x74, 0x65, 0x41, 0x46, + 0x31, 0x72, 0x79, 0x35, 0x71, 0x5a, 0x62, 0x50, 0x42, 0x4b, 0x67, + 0x4f, 0x43, 0x41, 0x51, 0x55, 0x41, 0x41, 0x6f, 0x49, 0x42, 0x41, + 0x47, 0x4a, 0x64, 0x0a, 0x6d, 0x34, 0x34, 0x67, 0x6f, 0x79, 0x41, + 0x65, 0x76, 0x50, 0x67, 0x7a, 0x62, 0x6b, 0x59, 0x4c, 0x58, 0x35, + 0x62, 0x31, 0x5a, 0x4e, 0x4e, 0x4a, 0x7a, 0x68, 0x39, 0x33, 0x57, + 0x46, 0x30, 0x72, 0x4a, 0x63, 0x43, 0x69, 0x42, 0x55, 0x34, 0x72, + 0x32, 0x41, 0x33, 0x4d, 0x34, 0x38, 0x39, 0x4c, 0x4b, 0x5a, 0x57, + 0x69, 0x37, 0x42, 0x49, 0x61, 0x33, 0x55, 0x30, 0x59, 0x6a, 0x6c, + 0x38, 0x6c, 0x0a, 0x62, 0x41, 0x68, 0x41, 0x34, 0x57, 0x4c, 0x5a, + 0x62, 0x31, 0x70, 0x55, 0x38, 0x4e, 0x2b, 0x4b, 0x34, 0x4c, 0x4e, + 0x6b, 0x4b, 0x4c, 0x72, 0x61, 0x61, 0x69, 0x45, 0x6a, 0x6f, 0x71, + 0x74, 0x75, 0x6c, 0x38, 0x44, 0x33, 0x54, 0x70, 0x58, 0x6f, 0x77, + 0x2b, 0x78, 0x38, 0x75, 0x34, 0x4c, 0x63, 0x4e, 0x38, 0x6b, 0x63, + 0x71, 0x34, 0x57, 0x31, 0x78, 0x32, 0x42, 0x78, 0x4e, 0x4a, 0x7a, + 0x72, 0x0a, 0x38, 0x64, 0x50, 0x30, 0x6a, 0x2b, 0x34, 0x70, 0x65, + 0x4c, 0x6d, 0x2f, 0x2f, 0x30, 0x45, 0x55, 0x33, 0x65, 0x33, 0x46, + 0x44, 0x32, 0x71, 0x4e, 0x6e, 0x39, 0x2b, 0x66, 0x58, 0x42, 0x71, + 0x57, 0x71, 0x66, 0x4f, 0x47, 0x70, 0x36, 0x69, 0x69, 0x59, 0x78, + 0x64, 0x35, 0x75, 0x4b, 0x45, 0x52, 0x6e, 0x57, 0x52, 0x6f, 0x55, + 0x77, 0x45, 0x44, 0x36, 0x72, 0x52, 0x6a, 0x4a, 0x48, 0x49, 0x57, + 0x0a, 0x62, 0x79, 0x4c, 0x53, 0x73, 0x6d, 0x58, 0x75, 0x71, 0x53, + 0x66, 0x52, 0x34, 0x62, 0x42, 0x4f, 0x34, 0x54, 0x54, 0x4c, 0x4d, + 0x54, 0x78, 0x77, 0x46, 0x4b, 0x4a, 0x2f, 0x45, 0x4d, 0x44, 0x53, + 0x39, 0x62, 0x2f, 0x69, 0x5a, 
0x42, 0x34, 0x4f, 0x34, 0x76, 0x6a, + 0x6e, 0x76, 0x4b, 0x78, 0x47, 0x63, 0x66, 0x2b, 0x64, 0x5a, 0x76, + 0x36, 0x68, 0x6c, 0x62, 0x56, 0x79, 0x7a, 0x66, 0x73, 0x2f, 0x0a, + 0x50, 0x4f, 0x5a, 0x62, 0x4b, 0x73, 0x37, 0x73, 0x48, 0x76, 0x53, + 0x6f, 0x57, 0x4f, 0x53, 0x50, 0x41, 0x67, 0x74, 0x33, 0x2f, 0x4a, + 0x30, 0x66, 0x6a, 0x49, 0x72, 0x33, 0x36, 0x64, 0x58, 0x48, 0x76, + 0x61, 0x55, 0x6a, 0x43, 0x67, 0x71, 0x58, 0x55, 0x51, 0x45, 0x73, + 0x7a, 0x67, 0x62, 0x73, 0x70, 0x47, 0x53, 0x6d, 0x47, 0x5a, 0x70, + 0x37, 0x4f, 0x6b, 0x67, 0x4e, 0x46, 0x4b, 0x67, 0x4b, 0x0a, 0x65, + 0x32, 0x53, 0x77, 0x79, 0x72, 0x6d, 0x32, 0x34, 0x5a, 0x50, 0x57, + 0x53, 0x31, 0x63, 0x67, 0x44, 0x6e, 0x51, 0x3d, 0x0a, 0x2d, 0x2d, + 0x2d, 0x2d, 0x2d, 0x45, 0x4e, 0x44, 0x20, 0x50, 0x55, 0x42, 0x4c, + 0x49, 0x43, 0x20, 0x4b, 0x45, 0x59, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d + ]) + } + ); + next(); + }); + }, + what: 'Generate public key from parsed public key' + }, +]; + +function next() { + if (Array.isArray(process._events.exit)) + process._events.exit = process._events.exit[1]; + if (++t === tests.length) + return; + + var v = tests[t]; + process.nextTick(function() { + v.run.call(v); + }); +} + +function makeMsg(what, msg) { + return '[' + group + what + ']: ' + msg; +} + +process.once('exit', function() { + assert(t === tests.length, + makeMsg('_exit', + 'Only finished ' + t + '/' + tests.length + ' tests')); +}); + +next(); diff --git a/reverse_engineering/node_modules/ssh2-streams/test/test.js b/reverse_engineering/node_modules/ssh2-streams/test/test.js new file mode 100644 index 0000000..4a91765 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2-streams/test/test.js @@ -0,0 +1,22 @@ +var spawn = require('child_process').spawn, + join = require('path').join; + +var files = require('fs').readdirSync(__dirname).filter(function(f) { + return (f.substr(0, 5) === 'test-'); + }).map(function(f) { + return join(__dirname, f); + }), + f = -1; + +function next() { + if (++f < files.length) { + 
spawn(process.argv[0], [ files[f] ], { stdio: 'inherit' }) + .on('exit', function(code) { + if (code === 0) + process.nextTick(next); + else + process.exit(code); + }); + } +} +next(); diff --git a/reverse_engineering/node_modules/ssh2/.travis.yml b/reverse_engineering/node_modules/ssh2/.travis.yml new file mode 100644 index 0000000..3511e64 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/.travis.yml @@ -0,0 +1,18 @@ +sudo: false +language: cpp +notifications: + email: false +env: + matrix: + - TRAVIS_NODE_VERSION="0.10" + - TRAVIS_NODE_VERSION="0.12" + - TRAVIS_NODE_VERSION="4" + - TRAVIS_NODE_VERSION="6" + - TRAVIS_NODE_VERSION="7" +install: + - rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION + - node --version + - npm --version + - npm install + - ssh -V +script: npm test diff --git a/reverse_engineering/node_modules/ssh2/LICENSE b/reverse_engineering/node_modules/ssh2/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/reverse_engineering/node_modules/ssh2/README.md b/reverse_engineering/node_modules/ssh2/README.md new file mode 100644 index 0000000..39e5b6c --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/README.md @@ -0,0 +1,1053 @@ +Description +=========== + +SSH2 client and server modules written in pure JavaScript for [node.js](http://nodejs.org/). + +Development/testing is done against OpenSSH (7.1 currently). + +[Changes from v0.4.x-v0.5.x](https://github.com/mscdex/ssh2/wiki/Changes-from-0.4.x-to-0.5.x) + +[![Build Status](https://travis-ci.org/mscdex/ssh2.svg?branch=master)](https://travis-ci.org/mscdex/ssh2) + + +Requirements +============ + +* [node.js](http://nodejs.org/) -- v0.10 or newer + + +Install +======= + + npm install ssh2 + + +Client Examples +=============== + +* Execute `uptime` on a server: + +```javascript +var Client = require('ssh2').Client; + +var conn = new Client(); +conn.on('ready', function() { + console.log('Client :: ready'); + conn.exec('uptime', function(err, stream) { + if (err) throw err; + stream.on('close', function(code, signal) { + console.log('Stream :: close :: code: ' + code + ', signal: ' + signal); + conn.end(); + }).on('data', function(data) { + console.log('STDOUT: ' + data); + }).stderr.on('data', function(data) { + console.log('STDERR: ' + data); + }); + }); +}).connect({ + host: '192.168.100.100', + port: 22, + username: 'frylock', + privateKey: require('fs').readFileSync('/here/is/my/key') +}); + +// example output: +// Client :: ready +// STDOUT: 17:41:15 up 22 days, 18:09, 1 user, load average: 0.00, 0.01, 0.05 +// +// Stream :: exit :: code: 0, signal: undefined +// Stream :: close +``` + +* Start an interactive 
shell session: + +```javascript +var Client = require('ssh2').Client; + +var conn = new Client(); +conn.on('ready', function() { + console.log('Client :: ready'); + conn.shell(function(err, stream) { + if (err) throw err; + stream.on('close', function() { + console.log('Stream :: close'); + conn.end(); + }).on('data', function(data) { + console.log('STDOUT: ' + data); + }).stderr.on('data', function(data) { + console.log('STDERR: ' + data); + }); + stream.end('ls -l\nexit\n'); + }); +}).connect({ + host: '192.168.100.100', + port: 22, + username: 'frylock', + privateKey: require('fs').readFileSync('/here/is/my/key') +}); + +// example output: +// Client :: ready +// STDOUT: Last login: Sun Jun 15 09:37:21 2014 from 192.168.100.100 +// +// STDOUT: ls -l +// exit +// +// STDOUT: frylock@athf:~$ ls -l +// +// STDOUT: total 8 +// +// STDOUT: drwxr-xr-x 2 frylock frylock 4096 Nov 18 2012 mydir +// +// STDOUT: -rw-r--r-- 1 frylock frylock 25 Apr 11 2013 test.txt +// +// STDOUT: frylock@athf:~$ exit +// +// STDOUT: logout +// +// Stream :: close +``` + +* Send a raw HTTP request to port 80 on the server: + +```javascript +var Client = require('ssh2').Client; + +var conn = new Client(); +conn.on('ready', function() { + console.log('Client :: ready'); + conn.forwardOut('192.168.100.102', 8000, '127.0.0.1', 80, function(err, stream) { + if (err) throw err; + stream.on('close', function() { + console.log('TCP :: CLOSED'); + conn.end(); + }).on('data', function(data) { + console.log('TCP :: DATA: ' + data); + }).end([ + 'HEAD / HTTP/1.1', + 'User-Agent: curl/7.27.0', + 'Host: 127.0.0.1', + 'Accept: */*', + 'Connection: close', + '', + '' + ].join('\r\n')); + }); +}).connect({ + host: '192.168.100.100', + port: 22, + username: 'frylock', + password: 'nodejsrules' +}); + +// example output: +// Client :: ready +// TCP :: DATA: HTTP/1.1 200 OK +// Date: Thu, 15 Nov 2012 13:52:58 GMT +// Server: Apache/2.2.22 (Ubuntu) +// X-Powered-By: PHP/5.4.6-1ubuntu1 +// Last-Modified: Thu, 01 
Jan 1970 00:00:00 GMT +// Content-Encoding: gzip +// Vary: Accept-Encoding +// Connection: close +// Content-Type: text/html; charset=UTF-8 +// +// +// TCP :: CLOSED +``` + +* Forward connections to 127.0.0.1:8000 on the server to us: + +```javascript +var Client = require('ssh2').Client; + +var conn = new Client(); +conn.on('ready', function() { + console.log('Client :: ready'); + conn.forwardIn('127.0.0.1', 8000, function(err) { + if (err) throw err; + console.log('Listening for connections on server on port 8000!'); + }); +}).on('tcp connection', function(info, accept, reject) { + console.log('TCP :: INCOMING CONNECTION:'); + console.dir(info); + accept().on('close', function() { + console.log('TCP :: CLOSED'); + }).on('data', function(data) { + console.log('TCP :: DATA: ' + data); + }).end([ + 'HTTP/1.1 404 Not Found', + 'Date: Thu, 15 Nov 2012 02:07:58 GMT', + 'Server: ForwardedConnection', + 'Content-Length: 0', + 'Connection: close', + '', + '' + ].join('\r\n')); +}).connect({ + host: '192.168.100.100', + port: 22, + username: 'frylock', + password: 'nodejsrules' +}); + +// example output: +// Client :: ready +// Listening for connections on server on port 8000! +// (.... 
then from another terminal on the server: `curl -I http://127.0.0.1:8000`) +// TCP :: INCOMING CONNECTION: { destIP: '127.0.0.1', +// destPort: 8000, +// srcIP: '127.0.0.1', +// srcPort: 41969 } +// TCP DATA: HEAD / HTTP/1.1 +// User-Agent: curl/7.27.0 +// Host: 127.0.0.1:8000 +// Accept: */* +// +// +// TCP :: CLOSED +``` + +* Get a directory listing via SFTP: + +```javascript +var Client = require('ssh2').Client; + +var conn = new Client(); +conn.on('ready', function() { + console.log('Client :: ready'); + conn.sftp(function(err, sftp) { + if (err) throw err; + sftp.readdir('foo', function(err, list) { + if (err) throw err; + console.dir(list); + conn.end(); + }); + }); +}).connect({ + host: '192.168.100.100', + port: 22, + username: 'frylock', + password: 'nodejsrules' +}); + +// example output: +// Client :: ready +// [ { filename: 'test.txt', +// longname: '-rw-r--r-- 1 frylock frylock 12 Nov 18 11:05 test.txt', +// attrs: +// { size: 12, +// uid: 1000, +// gid: 1000, +// mode: 33188, +// atime: 1353254750, +// mtime: 1353254744 } }, +// { filename: 'mydir', +// longname: 'drwxr-xr-x 2 frylock frylock 4096 Nov 18 15:03 mydir', +// attrs: +// { size: 1048576, +// uid: 1000, +// gid: 1000, +// mode: 16877, +// atime: 1353269007, +// mtime: 1353269007 } } ] +``` + +* Connection hopping: + +```javascript +var Client = require('ssh2').Client; + +var conn1 = new Client(); +var conn2 = new Client(); + +conn1.on('ready', function() { + console.log('FIRST :: connection ready'); + conn1.exec('nc 192.168.1.2 22', function(err, stream) { + if (err) { + console.log('FIRST :: exec error: ' + err); + return conn1.end(); + } + conn2.connect({ + sock: stream, + username: 'user2', + password: 'password2', + }); + }); +}).connect({ + host: '192.168.1.1', + username: 'user1', + password: 'password1', +}); + +conn2.on('ready', function() { + console.log('SECOND :: connection ready'); + conn2.exec('uptime', function(err, stream) { + if (err) { + console.log('SECOND :: exec error: ' 
+ err); + return conn1.end(); + } + stream.on('end', function() { + conn1.end(); // close parent (and this) connection + }).on('data', function(data) { + console.log(data.toString()); + }); + }); +}); +``` + +* Forward X11 connections (xeyes in this case): + +```javascript +var net = require('net'); + +var Client = require('ssh2').Client; + +var conn = new Client(); + +conn.on('x11', function(info, accept, reject) { + var xserversock = new net.Socket(); + xserversock.on('connect', function() { + var xclientsock = accept(); + xclientsock.pipe(xserversock).pipe(xclientsock); + }); + // connects to localhost:0.0 + xserversock.connect(6000, 'localhost'); +}); + +conn.on('ready', function() { + conn.exec('xeyes', { x11: true }, function(err, stream) { + if (err) throw err; + var code = 0; + stream.on('end', function() { + if (code !== 0) + console.log('Do you have X11 forwarding enabled on your SSH server?'); + conn.end(); + }).on('exit', function(exitcode) { + code = exitcode; + }); + }); +}).connect({ + host: '192.168.1.1', + username: 'foo', + password: 'bar' +}); +``` + +* Dynamic (1:1) port forwarding using a SOCKSv5 proxy (using [socksv5](https://github.com/mscdex/socksv5)): + +```javascript +var socks = require('socksv5'); +var Client = require('ssh2').Client; + +var ssh_config = { + host: '192.168.100.1', + port: 22, + username: 'nodejs', + password: 'rules' +}; + +socks.createServer(function(info, accept, deny) { + // NOTE: you could just use one ssh2 client connection for all forwards, but + // you could run into server-imposed limits if you have too many forwards open + // at any given time + var conn = new Client(); + conn.on('ready', function() { + conn.forwardOut(info.srcAddr, + info.srcPort, + info.dstAddr, + info.dstPort, + function(err, stream) { + if (err) { + conn.end(); + return deny(); + } + + var clientSocket; + if (clientSocket = accept(true)) { + stream.pipe(clientSocket).pipe(stream).on('close', function() { + conn.end(); + }); + } else + 
conn.end(); + }); + }).on('error', function(err) { + deny(); + }).connect(ssh_config); +}).listen(1080, 'localhost', function() { + console.log('SOCKSv5 proxy server started on port 1080'); +}).useAuth(socks.auth.None()); + +// test with cURL: +// curl -i --socks5 localhost:1080 google.com +``` + +* Invoke an arbitrary subsystem (netconf in this case): + +```javascript +var Client = require('ssh2').Client; +var xmlhello = '' + + '' + + ' ' + + ' urn:ietf:params:netconf:base:1.0' + + ' ' + + ']]>]]>'; + +var conn = new Client(); + +conn.on('ready', function() { + console.log('Client :: ready'); + conn.subsys('netconf', function(err, stream) { + if (err) throw err; + stream.on('data', function(data) { + console.log(data); + }).write(xmlhello); + }); +}).connect({ + host: '1.2.3.4', + port: 22, + username: 'blargh', + password: 'honk' +}); +``` + +Server Examples +=============== + +* Only allow password and public key authentication and non-interactive (exec) command execution: + +```javascript +var fs = require('fs'); +var crypto = require('crypto'); +var inspect = require('util').inspect; + +var buffersEqual = require('buffer-equal-constant-time'); +var ssh2 = require('ssh2'); +var utils = ssh2.utils; + +var pubKey = utils.genPublicKey(utils.parseKey(fs.readFileSync('user.pub'))); + +new ssh2.Server({ + hostKeys: [fs.readFileSync('host.key')] +}, function(client) { + console.log('Client connected!'); + + client.on('authentication', function(ctx) { + if (ctx.method === 'password' + && ctx.username === 'foo' + && ctx.password === 'bar') + ctx.accept(); + else if (ctx.method === 'publickey' + && ctx.key.algo === pubKey.fulltype + && buffersEqual(ctx.key.data, pubKey.public)) { + if (ctx.signature) { + var verifier = crypto.createVerify(ctx.sigAlgo); + verifier.update(ctx.blob); + if (verifier.verify(pubKey.publicOrig, ctx.signature)) + ctx.accept(); + else + ctx.reject(); + } else { + // if no signature present, that means the client is just checking + // the validity 
of the given public key + ctx.accept(); + } + } else + ctx.reject(); + }).on('ready', function() { + console.log('Client authenticated!'); + + client.on('session', function(accept, reject) { + var session = accept(); + session.once('exec', function(accept, reject, info) { + console.log('Client wants to execute: ' + inspect(info.command)); + var stream = accept(); + stream.stderr.write('Oh no, the dreaded errors!\n'); + stream.write('Just kidding about the errors!\n'); + stream.exit(0); + stream.end(); + }); + }); + }).on('end', function() { + console.log('Client disconnected'); + }); +}).listen(0, '127.0.0.1', function() { + console.log('Listening on port ' + this.address().port); +}); +``` + +* SFTP only server: + +```javascript +var fs = require('fs'); + +var ssh2 = require('ssh2'); +var OPEN_MODE = ssh2.SFTP_OPEN_MODE; +var STATUS_CODE = ssh2.SFTP_STATUS_CODE; + +new ssh2.Server({ + hostKeys: [fs.readFileSync('host.key')] +}, function(client) { + console.log('Client connected!'); + + client.on('authentication', function(ctx) { + if (ctx.method === 'password' + && ctx.username === 'foo' + && ctx.password === 'bar') + ctx.accept(); + else + ctx.reject(); + }).on('ready', function() { + console.log('Client authenticated!'); + + client.on('session', function(accept, reject) { + var session = accept(); + session.on('sftp', function(accept, reject) { + console.log('Client SFTP session'); + var openFiles = {}; + var handleCount = 0; + // `sftpStream` is an `SFTPStream` instance in server mode + // see: https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md + var sftpStream = accept(); + sftpStream.on('OPEN', function(reqid, filename, flags, attrs) { + // only allow opening /tmp/foo.txt for writing + if (filename !== '/tmp/foo.txt' || !(flags & OPEN_MODE.WRITE)) + return sftpStream.status(reqid, STATUS_CODE.FAILURE); + // create a fake handle to return to the client, this could easily + // be a real file descriptor number for example if actually opening + // 
the file on the disk + var handle = new Buffer(4); + openFiles[handleCount] = true; + handle.writeUInt32BE(handleCount++, 0, true); + sftpStream.handle(reqid, handle); + console.log('Opening file for write') + }).on('WRITE', function(reqid, handle, offset, data) { + if (handle.length !== 4 || !openFiles[handle.readUInt32BE(0, true)]) + return sftpStream.status(reqid, STATUS_CODE.FAILURE); + // fake the write + sftpStream.status(reqid, STATUS_CODE.OK); + var inspected = require('util').inspect(data); + console.log('Write to file at offset %d: %s', offset, inspected); + }).on('CLOSE', function(reqid, handle) { + var fnum; + if (handle.length !== 4 || !openFiles[(fnum = handle.readUInt32BE(0, true))]) + return sftpStream.status(reqid, STATUS_CODE.FAILURE); + delete openFiles[fnum]; + sftpStream.status(reqid, STATUS_CODE.OK); + console.log('Closing file'); + }); + }); + }); + }).on('end', function() { + console.log('Client disconnected'); + }); +}).listen(0, '127.0.0.1', function() { + console.log('Listening on port ' + this.address().port); +}); +``` + +* You can find more examples in the `examples` directory of this repository. + +API +=== + +`require('ssh2').Client` returns a **_Client_** constructor. + +`require('ssh2').Server` returns a **_Server_** constructor. + +`require('ssh2').utils` returns the [utility methods from `ssh2-streams`](https://github.com/mscdex/ssh2-streams#utility-methods). + +`require('ssh2').SFTP_STATUS_CODE` returns the [`SFTPStream.STATUS_CODE` from `ssh2-streams`](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md#sftpstream-static-constants). + +`require('ssh2').SFTP_OPEN_MODE` returns the [`SFTPStream.OPEN_MODE` from `ssh2-streams`](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md#sftpstream-static-constants). + +Client events +------------- + +* **banner**(< _string_ >message, < _string_ >language) - A notice was sent by the server upon connection. + +* **ready**() - Authentication was successful. 
+ +* **tcp connection**(< _object_ >details, < _function_ >accept, < _function_ >reject) - An incoming forwarded TCP connection is being requested. Calling `accept` accepts the connection and returns a `Channel` object. Calling `reject` rejects the connection and no further action is needed. `details` contains: + + * **srcIP** - _string_ - The originating IP of the connection. + + * **srcPort** - _integer_ - The originating port of the connection. + + * **destIP** - _string_ - The remote IP the connection was received on (given in earlier call to `forwardIn()`). + + * **destPort** - _integer_ - The remote port the connection was received on (given in earlier call to `forwardIn()`). + +* **x11**(< _object_ >details, < _function_ >accept, < _function_ >reject) - An incoming X11 connection is being requested. Calling `accept` accepts the connection and returns a `Channel` object. Calling `reject` rejects the connection and no further action is needed. `details` contains: + + * **srcIP** - _string_ - The originating IP of the connection. + + * **srcPort** - _integer_ - The originating port of the connection. + +* **keyboard-interactive**(< _string_ >name, < _string_ >instructions, < _string_ >instructionsLang, < _array_ >prompts, < _function_ >finish) - The server is asking for replies to the given `prompts` for keyboard-interactive user authentication. `name` is generally what you'd use as a window title (for GUI apps). `prompts` is an array of `{ prompt: 'Password: ', echo: false }` style objects (here `echo` indicates whether user input should be displayed on the screen). The answers for all prompts must be provided as an array of strings and passed to `finish` when you are ready to continue. Note: It's possible for the server to come back and ask more questions. + +* **change password**(< _string_ >message, < _string_ >language, < _function_ >done) - If using password-based user authentication, the server has requested that the user's password be changed. 
Call `done` with the new password. + +* **continue**() - Emitted when more requests/data can be sent to the server (after a `Client` method returned `false`). + +* **error**(< _Error_ >err) - An error occurred. A 'level' property indicates 'client-socket' for socket-level errors and 'client-ssh' for SSH disconnection messages. In the case of 'client-ssh' messages, there may be a 'description' property that provides more detail. + +* **end**() - The socket was disconnected. + +* **close**(< _boolean_ >hadError) - The socket was closed. `hadError` is set to `true` if this was due to error. + + +Client methods +-------------- + +* **(constructor)**() - Creates and returns a new Client instance. + +* **connect**(< _object_ >config) - _(void)_ - Attempts a connection to a server using the information given in `config`: + + * **host** - _string_ - Hostname or IP address of the server. **Default:** `'localhost'` + + * **port** - _integer_ - Port number of the server. **Default:** `22` + + * **forceIPv4** - _boolean_ - Only connect via resolved IPv4 address for `host`. **Default:** `false` + + * **forceIPv6** - _boolean_ - Only connect via resolved IPv6 address for `host`. **Default:** `false` + + * **hostHash** - _string_ - 'md5' or 'sha1'. The host's key is hashed using this method and passed to the **hostVerifier** function. **Default:** (none) + + * **hostVerifier** - _function_ - Function with parameters `(hashedKey[, callback])` where `hashedKey` is a string hex hash of the host's key for verification purposes. Return `true` to continue with the handshake or `false` to reject and disconnect, or call `callback()` with `true` or `false` if you need to perform asynchronous verification. **Default:** (auto-accept if `hostVerifier` is not set) + + * **username** - _string_ - Username for authentication. **Default:** (none) + + * **password** - _string_ - Password for password-based user authentication. 
**Default:** (none) + + * **agent** - _string_ - Path to ssh-agent's UNIX socket for ssh-agent-based user authentication. **Windows users: set to 'pageant' for authenticating with Pageant or (actual) path to a cygwin "UNIX socket."** **Default:** (none) + + * **agentForward** - _boolean_ - Set to `true` to use OpenSSH agent forwarding (`auth-agent@openssh.com`) for the life of the connection. `agent` must also be set to use this feature. **Default:** `false` + + * **privateKey** - _mixed_ - _Buffer_ or _string_ that contains a private key for either key-based or hostbased user authentication (OpenSSH format). **Default:** (none) + + * **passphrase** - _string_ - For an encrypted private key, this is the passphrase used to decrypt it. **Default:** (none) + + * **localHostname** - _string_ - Along with **localUsername** and **privateKey**, set this to a non-empty string for hostbased user authentication. **Default:** (none) + + * **localUsername** - _string_ - Along with **localHostname** and **privateKey**, set this to a non-empty string for hostbased user authentication. **Default:** (none) + + * **tryKeyboard** - _boolean_ - Try keyboard-interactive user authentication if primary user authentication method fails. If you set this to `true`, you need to handle the `keyboard-interactive` event. **Default:** `false` + + * **keepaliveInterval** - _integer_ - How often (in milliseconds) to send SSH-level keepalive packets to the server (in a similar way as OpenSSH's ServerAliveInterval config option). Set to 0 to disable. **Default:** `0` + + * **keepaliveCountMax** - _integer_ - How many consecutive, unanswered SSH-level keepalive packets that can be sent to the server before disconnection (similar to OpenSSH's ServerAliveCountMax config option). **Default:** `3` + + * **readyTimeout** - _integer_ - How long (in milliseconds) to wait for the SSH handshake to complete. 
**Default:** `20000` + + * **sock** - _ReadableStream_ - A _ReadableStream_ to use for communicating with the server instead of creating and using a new TCP connection (useful for connection hopping). + + * **strictVendor** - _boolean_ - Performs a strict server vendor check before sending vendor-specific requests, etc. (e.g. check for OpenSSH server when using `openssh_noMoreSessions()`) **Default:** `true` + + * **algorithms** - _object_ - This option allows you to explicitly override the default transport layer algorithms used for the connection. Each value must be an array of valid algorithms for that category. The order of the algorithms in the arrays are important, with the most favorable being first. For a list of valid and default algorithm names, please review the documentation for the version of `ssh2-streams` used by this module. Valid keys: + + * **kex** - _array_ - Key exchange algorithms. + + * **cipher** - _array_ - Ciphers. + + * **serverHostKey** - _array_ - Server host key formats. + + * **hmac** - _array_ - (H)MAC algorithms. + + * **compress** - _array_ - Compression algorithms. + + * **compress** - _mixed_ - Set to `true` to enable compression if server supports it, `'force'` to force compression (disconnecting if server does not support it), or `false` to explicitly opt out of compression all of the time. Note: this setting is overridden when explicitly setting a compression algorithm in the `algorithms` configuration option. **Default:** (only use compression if that is only what the server supports) + + * **debug** - _function_ - Set this to a function that receives a single string argument to get detailed (local) debug information. **Default:** (none) + +**Authentication method priorities:** Password -> Private Key -> Agent (-> keyboard-interactive if `tryKeyboard` is `true`) -> Hostbased -> None + +* **exec**(< _string_ >command[, < _object_ >options], < _function_ >callback) - _boolean_ - Executes `command` on the server. 
Returns `false` if you should wait for the `continue` event before sending any more traffic. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Valid `options` properties are: + + * **env** - _object_ - An environment to use for the execution of the command. + + * **pty** - _mixed_ - Set to `true` to allocate a pseudo-tty with defaults, or an object containing specific pseudo-tty settings (see 'Pseudo-TTY settings'). Setting up a pseudo-tty can be useful when working with remote processes that expect input from an actual terminal (e.g. sudo's password prompt). + + * **x11** - _mixed_ - Set to `true` to use defaults below, set to a number to specify a specific screen number, or an object with the following valid properties: + + * **single** - _boolean_ - Allow just a single connection? **Default:** `false` + + * **screen** - _number_ - Screen number to use **Default:** `0` + +* **shell**([[< _mixed_ >window,] < _object_ >options]< _function_ >callback) - _boolean_ - Starts an interactive shell session on the server, with an optional `window` object containing pseudo-tty settings (see 'Pseudo-TTY settings'). If `window === false`, then no pseudo-tty is allocated. `options` supports the `x11` option as described in exec(). `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **forwardIn**(< _string_ >remoteAddr, < _integer_ >remotePort, < _function_ >callback) - _boolean_ - Bind to `remoteAddr` on `remotePort` on the server and forward incoming TCP connections. `callback` has 2 parameters: < _Error_ >err, < _integer_ >port (`port` is the assigned port number if `remotePort` was 0). Returns `false` if you should wait for the `continue` event before sending any more traffic. 
Here are some special values for `remoteAddr` and their associated binding behaviors: + + * '' - Connections are to be accepted on all protocol families supported by the server. + + * '0.0.0.0' - Listen on all IPv4 addresses. + + * '::' - Listen on all IPv6 addresses. + + * 'localhost' - Listen on all protocol families supported by the server on loopback addresses only. + + * '127.0.0.1' and '::1' - Listen on the loopback interfaces for IPv4 and IPv6, respectively. + +* **unforwardIn**(< _string_ >remoteAddr, < _integer_ >remotePort, < _function_ >callback) - _boolean_ - Unbind from `remoteAddr` on `remotePort` on the server and stop forwarding incoming TCP connections. Until `callback` is called, more connections may still come in. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **forwardOut**(< _string_ >srcIP, < _integer_ >srcPort, < _string_ >dstIP, < _integer_ >dstPort, < _function_ >callback) - _boolean_ - Open a connection with `srcIP` and `srcPort` as the originating address and port and `dstIP` and `dstPort` as the remote destination address and port. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **sftp**(< _function_ >callback) - _boolean_ - Starts an SFTP session. `callback` has 2 parameters: < _Error_ >err, < _SFTPStream_ >sftp. For methods available on `sftp`, see the [`SFTPStream` client documentation](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md) (except `read()` and `write()` are used instead of `readData()` and `writeData()` respectively, for convenience). Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **subsys**(< _string_ >subsystem, < _function_ >callback) - _boolean_ - Invokes `subsystem` on the server. 
`callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **end**() - _(void)_ - Disconnects the socket. + +* **openssh_noMoreSessions**(< _function_ >callback) - _boolean_ - OpenSSH extension that sends a request to reject any new sessions (e.g. exec, shell, sftp, subsys) for this connection. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_forwardInStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - OpenSSH extension that binds to a UNIX domain socket at `socketPath` on the server and forwards incoming connections. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_unforwardInStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - OpenSSH extension that unbinds from a UNIX domain socket at `socketPath` on the server and stops forwarding incoming connections. `callback` has 1 parameter: < _Error_ >err. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_forwardOutStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - OpenSSH extension that opens a connection to a UNIX domain socket at `socketPath` on the server. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + +Server events +------------- + +* **connection**(< _Connection_ >client, < _object_ >info) - A new client has connected. `info` contains the following properties: + + * **ip** - _string_ - The remoteAddress of the connection. + + * **header** - _object_ - Information about the client's header: + + * **identRaw** - _string_ - The raw client identification string. 
+ + * **versions** - _object_ - Various version information: + + * **protocol** - _string_ - The SSH protocol version (always `1.99` or `2.0`). + + * **software** - _string_ - The software name and version of the client. + + * **comments** - _string_ - Any text that comes after the software name/version. + + Example: the identification string `SSH-2.0-OpenSSH_6.6.1p1 Ubuntu-2ubuntu2` would be parsed as: + +```javascript + { identRaw: 'SSH-2.0-OpenSSH_6.6.1p1 Ubuntu-2ubuntu2', + version: { + protocol: '2.0', + software: 'OpenSSH_6.6.1p1' + }, + comments: 'Ubuntu-2ubuntu2' } +``` + +Server methods +-------------- + +* **(constructor)**(< _object_ >config[, < _function_ >connectionListener]) - Creates and returns a new Server instance. Server instances also have the same methods/properties/events as [`net.Server`](http://nodejs.org/docs/latest/api/net.html#net_class_net_server). `connectionListener` if supplied, is added as a `connection` listener. Valid `config` properties: + + * **hostKeys** - _array_ - An array of either Buffers/strings that contain host private keys or objects in the format of `{ key: , passphrase: }` for encrypted private keys. (**Required**) **Default:** (none) + + * **algorithms** - _object_ - This option allows you to explicitly override the default transport layer algorithms used for incoming client connections. Each value must be an array of valid algorithms for that category. The order of the algorithms in the arrays are important, with the most favorable being first. For a list of valid and default algorithm names, please review the documentation for the version of `ssh2-streams` used by this module. Valid keys: + + * **kex** - _array_ - Key exchange algorithms. + + * **cipher** - _array_ - Ciphers. + + * **serverHostKey** - _array_ - Server host key formats. + + * **hmac** - _array_ - (H)MAC algorithms. + + * **compress** - _array_ - Compression algorithms. 
+ + * **greeting** - _string_ - A message that is sent to clients immediately upon connection, before handshaking begins. **Note:** Most clients usually ignore this. **Default:** (none) + + * **banner** - _string_ - A message that is sent to clients once, right before authentication begins. **Default:** (none) + + * **ident** - _string_ - A custom server software name/version identifier. **Default:** `'ssh2js' + moduleVersion + 'srv'` + + * **highWaterMark** - _integer_ - This is the `highWaterMark` to use for the parser stream. **Default:** `32 * 1024` + + * **debug** - _function_ - Set this to a function that receives a single string argument to get detailed (local) debug information. **Default:** (none) + + +Connection events +----------------- + +* **authentication**(< _AuthContext_ >ctx) - The client has requested authentication. `ctx.username` contains the client username, `ctx.method` contains the requested authentication method, and `ctx.accept()` and `ctx.reject([< Array >authMethodsLeft[, < Boolean >isPartialSuccess]])` are used to accept or reject the authentication request respectively. `abort` is emitted if the client aborts the authentication request. Other properties/methods available on `ctx` depends on the `ctx.method` of authentication the client has requested: + + * `password`: + + * **password** - _string_ - This is the password sent by the client. + + * `publickey`: + + * **key** - _object_ - Contains information about the public key sent by the client: + + * **algo** - _string_ - The name of the key algorithm (e.g. `ssh-rsa`). + + * **data** - _Buffer_ - The actual key data. + + * **sigAlgo** - _mixed_ - If the value is `undefined`, the client is only checking the validity of the `key`. If the value is a _string_, then this contains the signature algorithm that is passed to [`crypto.createVerify()`](http://nodejs.org/docs/latest/api/crypto.html#crypto_crypto_createverify_algorithm). 
+ + * **blob** - _mixed_ - If the value is `undefined`, the client is only checking the validity of the `key`. If the value is a _Buffer_, then this contains the data that is passed to [`verifier.update()`](http://nodejs.org/docs/latest/api/crypto.html#crypto_verifier_update_data). + + * **signature** - _mixed_ - If the value is `undefined`, the client is only checking the validity of the `key`. If the value is a _Buffer_, then this contains a signature that is passed to [`verifier.verify()`](http://nodejs.org/docs/latest/api/crypto.html#crypto_verifier_verify_object_signature_signature_format). + + * `keyboard-interactive`: + + * **submethods** - _array_ - A list of preferred authentication "sub-methods" sent by the client. This may be used to determine what (if any) prompts to send to the client. + + * **prompt**(< _array_ >prompts[, < _string_ >title[, < _string_ >instructions]], < _function_ >callback) - _boolean_ - Send prompts to the client. `prompts` is an array of `{ prompt: 'Prompt text', echo: true }` objects (`prompt` being the prompt text and `echo` indicating whether the client's response to the prompt should be echoed to their display). `callback` is called with `(err, responses)`, where `responses` is an array of string responses matching up to the `prompts`. + +* **ready**() - Emitted when the client has been successfully authenticated. + +* **session**(< _function_ >accept, < _function_ >reject) - Emitted when the client has requested a new session. Sessions are used to start interactive shells, execute commands, request X11 forwarding, etc. `accept()` returns a new _Session_ instance. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **tcpip**(< _function_ >accept, < _function_ >reject, < _object_ >info) - Emitted when the client has requested an outbound (TCP) connection. `accept()` returns a new _Channel_ instance representing the connection. 
`reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` contains: + + * **srcIP** - _string_ - Source IP address of outgoing connection. + + * **srcPort** - _string_ - Source port of outgoing connection. + + * **destIP** - _string_ - Destination IP address of outgoing connection. + + * **destPort** - _string_ - Destination port of outgoing connection. + +* **openssh.streamlocal**(< _function_ >accept, < _function_ >reject, < _object_ >info) - Emitted when the client has requested a connection to a UNIX domain socket. `accept()` returns a new _Channel_ instance representing the connection. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` contains: + + * **socketPath** - _string_ - Destination socket path of outgoing connection. + +* **request**(< _mixed_ >accept, < _mixed_ >reject, < _string_ >name, < _object_ >info) - Emitted when the client has sent a global request for `name` (e.g. `tcpip-forward` or `cancel-tcpip-forward`). `accept` and `reject` are functions if the client requested a response. If `bindPort === 0`, you should pass the chosen port to `accept()` so that the client will know what port was bound. `info` contains additional details about the request: + + * `tcpip-forward` and `cancel-tcpip-forward`: + + * **bindAddr** - _string_ - The IP address to start/stop binding to. + + * **bindPort** - _integer_ - The port to start/stop binding to. + + * `streamlocal-forward@openssh.com` and `cancel-streamlocal-forward@openssh.com`: + + * **socketPath** - _string_ - The socket path to start/stop binding to. + +* **rekey**() - Emitted when the client has finished rekeying (either client or server initiated). + +* **continue**() - Emitted when more requests/data can be sent to the client (after a `Connection` method returned `false`). + +* **error**(< _Error_ >err) - An error occurred. + +* **end**() - The client socket disconnected. 
+ +* **close**(< _boolean_ >hadError) - The client socket was closed. `hadError` is set to `true` if this was due to error. + +Connection methods +------------------ + +* **end**() - _boolean_ - Closes the client connection. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **x11**(< _string_ >originAddr, < _integer_ >originPort, < _function_ >callback) - _boolean_ - Alert the client of an incoming X11 client connection from `originAddr` on port `originPort`. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **forwardOut**(< _string_ >boundAddr, < _integer_ >boundPort, < _string_ >remoteAddr, < _integer_ >remotePort, < _function_ >callback) - _boolean_ - Alert the client of an incoming TCP connection on `boundAddr` on port `boundPort` from `remoteAddr` on port `remotePort`. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **openssh_forwardOutStreamLocal**(< _string_ >socketPath, < _function_ >callback) - _boolean_ - Alert the client of an incoming UNIX domain socket connection on `socketPath`. `callback` has 2 parameters: < _Error_ >err, < _Channel_ >stream. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **rekey**([< _function_ >callback]) - _boolean_ - Initiates a rekeying with the client. If `callback` is supplied, it is added as a one-time handler for the `rekey` event. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + +Session events +-------------- + +* **pty**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client requested allocation of a pseudo-TTY for this session. 
`accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties: + + * **cols** - _integer_ - The number of columns for the pseudo-TTY. + + * **rows** - _integer_ - The number of rows for the pseudo-TTY. + + * **width** - _integer_ - The width of the pseudo-TTY in pixels. + + * **height** - _integer_ - The height of the pseudo-TTY in pixels. + + * **modes** - _object_ - Contains the requested terminal modes of the pseudo-TTY keyed on the mode name with the value being the mode argument. (See the table at the end for valid names). + +* **window-change**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client reported a change in window dimensions during this session. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties: + + * **cols** - _integer_ - The new number of columns for the client window. + + * **rows** - _integer_ - The new number of rows for the client window. + + * **width** - _integer_ - The new width of the client window in pixels. + + * **height** - _integer_ - The new height of the client window in pixels. + +* **x11**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client requested X11 forwarding. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties: + + * **single** - _boolean_ - `true` if only a single connection should be forwarded. + + * **protocol** - _string_ - The name of the X11 authentication method used (e.g. `MIT-MAGIC-COOKIE-1`). + + * **cookie** - _string_ - The X11 authentication cookie encoded in hexadecimal. + + * **screen** - _integer_ - The screen number to forward X11 connections for. 
+ +* **env**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client requested an environment variable to be set for this session. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties: + + * **key** - _string_ - The environment variable's name. + + * **value** - _string_ - The environment variable's value. + +* **signal**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client has sent a signal. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties: + + * **name** - _string_ - The signal name (e.g. `SIGUSR1`). + +* **auth-agent**(< _mixed_ >accept, < _mixed_ >reject) - The client has requested incoming ssh-agent requests be forwarded to them. `accept` and `reject` are functions if the client requested a response and return `false` if you should wait for the `continue` event before sending any more traffic. + +* **shell**(< _mixed_ >accept, < _mixed_ >reject) - The client has requested an interactive shell. `accept` and `reject` are functions if the client requested a response. `accept()` returns a _Channel_ for the interactive shell. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **exec**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client has requested execution of a command string. `accept` and `reject` are functions if the client requested a response. `accept()` returns a _Channel_ for the command execution. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties: + + * **command** - _string_ - The command line to be executed. 
+ +* **sftp**(< _mixed_ >accept, < _mixed_ >reject) - The client has requested the SFTP subsystem. `accept` and `reject` are functions if the client requested a response. `accept()` returns an _SFTPStream_ in server mode (see the [`SFTPStream` documentation](https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md) for details). `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* **subsystem**(< _mixed_ >accept, < _mixed_ >reject, < _object_ >info) - The client has requested an arbitrary subsystem. `accept` and `reject` are functions if the client requested a response. `accept()` returns a _Channel_ for the subsystem. `reject()` Returns `false` if you should wait for the `continue` event before sending any more traffic. `info` has these properties: + + * **name** - _string_ - The name of the subsystem. + +* **close**() - The session was closed. + + +Channel +------- + +This is a normal **streams2** Duplex Stream, with the following changes: + +* A boolean property `allowHalfOpen` exists and behaves similarly to the property of the same name for `net.Socket`. When the stream's end() is called, if `allowHalfOpen` is `true`, only EOF will be sent (the server can still send data if they have not already sent EOF). The default value for this property is `true`. + +* A `close` event is emitted once the channel is completely closed on both the client and server. + +* Client-only: + + + * For exec(): + + * An `exit` event *may* (the SSH2 spec says it is optional) be emitted when the process finishes. If the process finished normally, the process's return value is passed to the `exit` callback. If the process was interrupted by a signal, the following are passed to the `exit` callback: null, < _string_ >signalName, < _boolean_ >didCoreDump, < _string_ >description. + + * If there was an `exit` event, the `close` event will be passed the same arguments for convenience. 
+ + * For shell() and exec(): + + * The readable side represents stdout and the writable side represents stdin. + + * A `stderr` property contains a Readable stream that represents output from stderr. + + * **signal**(< _string_ >signalName) - _boolean_ - Sends a POSIX signal to the current process on the server. Valid signal names are: 'ABRT', 'ALRM', 'FPE', 'HUP', 'ILL', 'INT', 'KILL', 'PIPE', 'QUIT', 'SEGV', 'TERM', 'USR1', and 'USR2'. Some server implementations may ignore this request if they do not support signals. Note: If you are trying to send SIGINT and you find `signal()` doesn't work, try writing `'\x03'` to the Channel stream instead. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + * **setWindow**(< _integer_ >rows, < _integer_ >cols, < _integer_ >height, < _integer_ >width) - _boolean_ - Lets the server know that the local terminal window has been resized. The meaning of these arguments are described in the 'Pseudo-TTY settings' section. Returns `false` if you should wait for the `continue` event before sending any more traffic. + +* Server-only: + + * For exec-enabled channel instances there is an additional method available that may be called right before you close the channel. It has two different signatures: + + * **exit**(< _integer_ >exitCode) - _boolean_ - Sends an exit status code to the client. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + * **exit**(< _string_ >signalName[, < _boolean_ >coreDumped[, < _string_ >errorMsg]]) - _boolean_ - Sends an exit status code to the client. Returns `false` if you should wait for the `continue` event before sending any more traffic. + + * For exec and shell-enabled channel instances, `channel.stderr` is a writable stream. 
+ + +Pseudo-TTY settings +------------------- + +* **rows** - < _integer_ > - Number of rows **Default:** `24` + +* **cols** - < _integer_ > - Number of columns **Default:** `80` + +* **height** - < _integer_ > - Height in pixels **Default:** `480` + +* **width** - < _integer_ > - Width in pixels **Default:** `640` + +* **term** - < _string_ > - The value to use for $TERM **Default:** `'vt100'` + +`rows` and `cols` override `width` and `height` when `rows` and `cols` are non-zero. + +Pixel dimensions refer to the drawable area of the window. + +Zero dimension parameters are ignored. + + +Terminal modes +-------------- + +
+Name           Description
+------------------------------------------------------------
+VINTR          Interrupt character; 255 if none.  Similarly
+               for the other characters.  Not all of these
+               characters are supported on all systems.
+VQUIT          The quit character (sends SIGQUIT signal on
+               POSIX systems).
+VERASE         Erase the character to left of the cursor.
+VKILL          Kill the current input line.
+VEOF           End-of-file character (sends EOF from the
+               terminal).
+VEOL           End-of-line character in addition to
+               carriage return and/or linefeed.
+VEOL2          Additional end-of-line character.
+VSTART         Continues paused output (normally
+               control-Q).
+VSTOP          Pauses output (normally control-S).
+VSUSP          Suspends the current program.
+VDSUSP         Another suspend character.
+VREPRINT       Reprints the current input line.
+VWERASE        Erases a word left of cursor.
+VLNEXT         Enter the next character typed literally,
+               even if it is a special character
+VFLUSH         Character to flush output.
+VSWTCH         Switch to a different shell layer.
+VSTATUS        Prints system status line (load, command,
+               pid, etc).
+VDISCARD       Toggles the flushing of terminal output.
+IGNPAR         The ignore parity flag.  The parameter
+               SHOULD be 0 if this flag is FALSE,
+               and 1 if it is TRUE.
+PARMRK         Mark parity and framing errors.
+INPCK          Enable checking of parity errors.
+ISTRIP         Strip 8th bit off characters.
+INLCR          Map NL into CR on input.
+IGNCR          Ignore CR on input.
+ICRNL          Map CR to NL on input.
+IUCLC          Translate uppercase characters to
+               lowercase.
+IXON           Enable output flow control.
+IXANY          Any char will restart after stop.
+IXOFF          Enable input flow control.
+IMAXBEL        Ring bell on input queue full.
+ISIG           Enable signals INTR, QUIT, [D]SUSP.
+ICANON         Canonicalize input lines.
+XCASE          Enable input and output of uppercase
+               characters by preceding their lowercase
+               equivalents with "\".
+ECHO           Enable echoing.
+ECHOE          Visually erase chars.
+ECHOK          Kill character discards current line.
+ECHONL         Echo NL even if ECHO is off.
+NOFLSH         Don't flush after interrupt.
+TOSTOP         Stop background jobs from output.
+IEXTEN         Enable extensions.
+ECHOCTL        Echo control characters as ^(Char).
+ECHOKE         Visual erase for line kill.
+PENDIN         Retype pending input.
+OPOST          Enable output processing.
+OLCUC          Convert lowercase to uppercase.
+ONLCR          Map NL to CR-NL.
+OCRNL          Translate carriage return to newline
+               (output).
+ONOCR          Translate newline to carriage
+               return-newline (output).
+ONLRET         Newline performs a carriage return
+               (output).
+CS7            7 bit mode.
+CS8            8 bit mode.
+PARENB         Parity enable.
+PARODD         Odd parity, else even.
+TTY_OP_ISPEED  Specifies the input baud rate in
+               bits per second.
+TTY_OP_OSPEED  Specifies the output baud rate in
+               bits per second.
+
diff --git a/reverse_engineering/node_modules/ssh2/examples/server-chat.js b/reverse_engineering/node_modules/ssh2/examples/server-chat.js new file mode 100644 index 0000000..18e5108 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/examples/server-chat.js @@ -0,0 +1,243 @@ +// **BEFORE RUNNING THIS SCRIPT:** +// 1. The server portion is best run on non-Windows systems because they have +// terminfo databases which are needed to properly work with different +// terminal types of client connections +// 2. Install `blessed`: `npm install blessed` +// 3. Create a server host key in this same directory and name it `host.key` + +var fs = require('fs'); + +var blessed = require('blessed'); +var Server = require('ssh2').Server; + +var RE_SPECIAL = /[\x00-\x1F\x7F]+|(?:\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[m|K])/g; +var MAX_MSG_LEN = 128; +var MAX_NAME_LEN = 10; +var PROMPT_NAME = 'Enter a nickname to use (max ' + MAX_NAME_LEN + ' chars): '; + +var users = []; + +function formatMessage(msg, output) { + var output = output; + output.parseTags = true; + msg = output._parseTags(msg); + output.parseTags = false; + return msg; +} + +function userBroadcast(msg, source) { + var sourceMsg = '> ' + msg; + var name = '{cyan-fg}{bold}' + source.name + '{/}'; + msg = ': ' + msg; + for (var i = 0; i < users.length; ++i) { + var user = users[i]; + var output = user.output; + if (source === user) + output.add(sourceMsg); + else + output.add(formatMessage(name, output) + msg); + } +} + +function localMessage(msg, source) { + var output = source.output; + output.add(formatMessage(msg, output)); +} + +function noop(v) {} + +new Server({ + hostKeys: [fs.readFileSync('host.key')], +}, function(client) { + var stream; + var name; + + client.on('authentication', function(ctx) { + var nick = ctx.username; + var prompt = PROMPT_NAME; + var lowered; + // Try to use username as nickname + if (nick.length > 0 && nick.length <= MAX_NAME_LEN) { + lowered = nick.toLowerCase(); + var ok = true; + 
for (var i = 0; i < users.length; ++i) { + if (users[i].name.toLowerCase() === lowered) { + ok = false; + prompt = 'That nickname is already in use.\n' + PROMPT_NAME; + break; + } + } + if (ok) { + name = nick; + return ctx.accept(); + } + } else if (nick.length === 0) + prompt = 'A nickname is required.\n' + PROMPT_NAME; + else + prompt = 'That nickname is too long.\n' + PROMPT_NAME; + + if (ctx.method !== 'keyboard-interactive') + return ctx.reject(['keyboard-interactive']); + + ctx.prompt(prompt, function retryPrompt(answers) { + if (answers.length === 0) + return ctx.reject(['keyboard-interactive']); + nick = answers[0]; + if (nick.length > MAX_NAME_LEN) { + return ctx.prompt('That nickname is too long.\n' + PROMPT_NAME, + retryPrompt); + } else if (nick.length === 0) { + return ctx.prompt('A nickname is required.\n' + PROMPT_NAME, + retryPrompt); + } + lowered = nick.toLowerCase(); + for (var i = 0; i < users.length; ++i) { + if (users[i].name.toLowerCase() === lowered) { + return ctx.prompt('That nickname is already in use.\n' + PROMPT_NAME, + retryPrompt); + } + } + name = nick; + ctx.accept(); + }); + }).on('ready', function() { + var rows; + var cols; + var term; + client.once('session', function(accept, reject) { + accept().once('pty', function(accept, reject, info) { + rows = info.rows; + cols = info.cols; + term = info.term; + accept && accept(); + }).on('window-change', function(accept, reject, info) { + rows = info.rows; + cols = info.cols; + if (stream) { + stream.rows = rows; + stream.columns = cols; + stream.emit('resize'); + } + accept && accept(); + }).once('shell', function(accept, reject) { + stream = accept(); + users.push(stream); + + stream.name = name; + stream.rows = rows || 24; + stream.columns = cols || 80; + stream.isTTY = true; + stream.setRawMode = noop; + stream.on('error', noop); + + var screen = new blessed.screen({ + autoPadding: true, + smartCSR: true, + program: new blessed.program({ + input: stream, + output: stream + }), + 
terminal: term || 'ansi' + }); + + screen.title = 'SSH Chatting as ' + name; + // Disable local echo + screen.program.attr('invisible', true); + + var output = stream.output = new blessed.log({ + screen: screen, + top: 0, + left: 0, + width: '100%', + bottom: 2, + scrollOnInput: true + }) + screen.append(output); + + screen.append(new blessed.box({ + screen: screen, + height: 1, + bottom: 1, + left: 0, + width: '100%', + type: 'line', + ch: '=' + })); + + var input = new blessed.textbox({ + screen: screen, + bottom: 0, + height: 1, + width: '100%', + inputOnFocus: true + }); + screen.append(input); + + input.focus(); + + // Local greetings + localMessage('{blue-bg}{white-fg}{bold}Welcome to SSH Chat!{/}\n' + + 'There are {bold}' + + (users.length - 1) + + '{/} other user(s) connected.\n' + + 'Type /quit or /exit to exit the chat.', + stream); + + // Let everyone else know that this user just joined + for (var i = 0; i < users.length; ++i) { + var user = users[i]; + var output = user.output; + if (user === stream) + continue; + output.add(formatMessage('{green-fg}*** {bold}', output) + + name + + formatMessage('{/bold} has joined the chat{/}', output)); + } + + screen.render(); + // XXX This fake resize event is needed for some terminals in order to + // have everything display correctly + screen.program.emit('resize'); + + // Read a line of input from the user + input.on('submit', function(line) { + input.clearValue(); + screen.render(); + if (!input.focused) + input.focus(); + line = line.replace(RE_SPECIAL, '').trim(); + if (line.length > MAX_MSG_LEN) + line = line.substring(0, MAX_MSG_LEN); + if (line.length > 0) { + if (line === '/quit' || line === '/exit') + stream.end(); + else + userBroadcast(line, stream); + } + }); + }); + }); + }).on('end', function() { + if (stream !== undefined) { + spliceOne(users, users.indexOf(stream)); + // Let everyone else know that this user just left + for (var i = 0; i < users.length; ++i) { + var user = users[i]; + var output 
= user.output; + output.add(formatMessage('{magenta-fg}*** {bold}', output) + + name + + formatMessage('{/bold} has left the chat{/}', output)); + } + } + }).on('error', function(err) { + // Ignore errors + }); +}).listen(0, function() { + console.log('Listening on port ' + this.address().port); +}); + +function spliceOne(list, index) { + for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1) + list[i] = list[k]; + list.pop(); +} diff --git a/reverse_engineering/node_modules/ssh2/examples/sftp-server-download-only.js b/reverse_engineering/node_modules/ssh2/examples/sftp-server-download-only.js new file mode 100644 index 0000000..8ea5a5a --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/examples/sftp-server-download-only.js @@ -0,0 +1,96 @@ +var constants = require('constants'); +var fs = require('fs'); + +var ssh2 = require('ssh2'); +var OPEN_MODE = ssh2.SFTP_OPEN_MODE; +var STATUS_CODE = ssh2.SFTP_STATUS_CODE; + +new ssh2.Server({ + hostKeys: [fs.readFileSync('host.key')] +}, function(client) { + console.log('Client connected!'); + + client.on('authentication', function(ctx) { + if (ctx.method === 'password' + && ctx.username === 'foo' + && ctx.password === 'bar') + ctx.accept(); + else + ctx.reject(['password']); + }).on('ready', function() { + console.log('Client authenticated!'); + + client.on('session', function(accept, reject) { + var session = accept(); + session.on('sftp', function(accept, reject) { + console.log('Client SFTP session'); + var openFiles = {}; + var handleCount = 0; + // `sftpStream` is an `SFTPStream` instance in server mode + // see: https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md + var sftpStream = accept(); + sftpStream.on('OPEN', function(reqid, filename, flags, attrs) { + console.log('OPEN', filename); + // only allow opening /tmp/foo.txt for writing + if (filename !== '/tmp/foo.txt' || !(flags & OPEN_MODE.READ)) + return sftpStream.status(reqid, STATUS_CODE.FAILURE); + // create a fake handle to 
return to the client, this could easily + // be a real file descriptor number for example if actually opening + // the file on the disk + var handle = new Buffer(4); + openFiles[handleCount] = { read: false }; + handle.writeUInt32BE(handleCount++, 0, true); + sftpStream.handle(reqid, handle); + console.log('Opening file for read') + }).on('READ', function(reqid, handle, offset, length) { + if (handle.length !== 4 || !openFiles[handle.readUInt32BE(0, true)]) + return sftpStream.status(reqid, STATUS_CODE.FAILURE); + // fake the read + var state = openFiles[handle.readUInt32BE(0, true)]; + if (state.read) + sftpStream.status(reqid, STATUS_CODE.EOF); + else { + state.read = true; + sftpStream.data(reqid, 'bar'); + console.log('Read from file at offset %d, length %d', offset, length); + } + }).on('CLOSE', function(reqid, handle) { + var fnum; + if (handle.length !== 4 || !openFiles[(fnum = handle.readUInt32BE(0, true))]) + return sftpStream.status(reqid, STATUS_CODE.FAILURE); + delete openFiles[fnum]; + sftpStream.status(reqid, STATUS_CODE.OK); + console.log('Closing file'); + }).on('REALPATH', function(reqid, path) { + var name = [{ + filename: '/tmp/foo.txt', + longname: '-rwxrwxrwx 1 foo foo 3 Dec 8 2009 foo.txt', + attrs: {} + }]; + sftpStream.name(reqid, name); + }).on('STAT', onSTAT) + .on('LSTAT', onSTAT); + function onSTAT(reqid, path) { + if (path !== '/tmp/foo.txt') + return sftpStream.status(reqid, STATUS_CODE.FAILURE); + var mode = constants.S_IFREG; // Regular file + mode |= constants.S_IRWXU; // read, write, execute for user + mode |= constants.S_IRWXG; // read, write, execute for group + mode |= constants.S_IRWXO; // read, write, execute for other + sftpStream.attrs(reqid, { + mode: mode, + uid: 0, + gid: 0, + size: 3, + atime: Date.now(), + mtime: Date.now() + }); + } + }); + }); + }).on('end', function() { + console.log('Client disconnected'); + }); +}).listen(0, '127.0.0.1', function() { + console.log('Listening on port ' + this.address().port); +}); 
diff --git a/reverse_engineering/node_modules/ssh2/lib/Channel.js b/reverse_engineering/node_modules/ssh2/lib/Channel.js new file mode 100644 index 0000000..1d8372d --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/lib/Channel.js @@ -0,0 +1,506 @@ +var inherits = require('util').inherits; +var DuplexStream = require('stream').Duplex; +var ReadableStream = require('stream').Readable; +var WritableStream = require('stream').Writable; + +var STDERR = require('ssh2-streams').constants.CHANNEL_EXTENDED_DATATYPE.STDERR; + +var PACKET_SIZE = 32 * 1024; +var MAX_WINDOW = 1 * 1024 * 1024; +var WINDOW_THRESHOLD = MAX_WINDOW / 2; +var CUSTOM_EVENTS = [ + 'CHANNEL_EOF', + 'CHANNEL_CLOSE', + 'CHANNEL_DATA', + 'CHANNEL_EXTENDED_DATA', + 'CHANNEL_WINDOW_ADJUST', + 'CHANNEL_SUCCESS', + 'CHANNEL_FAILURE', + 'CHANNEL_REQUEST' +]; +var CUSTOM_EVENTS_LEN = CUSTOM_EVENTS.length; + +function Channel(info, client, opts) { + var streamOpts = { + highWaterMark: MAX_WINDOW, + allowHalfOpen: (!opts || (opts && opts.allowHalfOpen !== false)) + }; + + this.allowHalfOpen = streamOpts.allowHalfOpen; + + DuplexStream.call(this, streamOpts); + + var self = this; + var server = opts && opts.server; + + this.server = server; + this.type = info.type; + this.subtype = undefined; + /* + incoming and outgoing contain these properties: + { + id: undefined, + window: undefined, + packetSize: undefined, + state: 'closed' + } + */ + var incoming = this.incoming = info.incoming; + var incomingId = incoming.id; + var outgoing = this.outgoing = info.outgoing; + var callbacks = this._callbacks = []; + var exitCode; + var exitSignal; + var exitDump; + var exitDesc; + var exitLang; + + this._client = client; + this._hasX11 = false; + + var channels = client._channels; + var sshstream = client._sshstream; + + function ondrain() { + if (self._waitClientDrain) { + self._waitClientDrain = false; + if (!self._waitWindow) { + if (self._chunk) + self._write(self._chunk, null, self._chunkcb); + else if 
(self._chunkcb) + self._chunkcb(); + else if (self._chunkErr) + self.stderr._write(self._chunkErr, null, self._chunkcbErr); + else if (self._chunkcbErr) + self._chunkcbErr(); + } + } + } + client._sock.on('drain', ondrain); + + sshstream.once('CHANNEL_EOF:' + incomingId, function() { + if (incoming.state === 'closed' || incoming.state === 'eof') + return; + incoming.state = 'eof'; + + if (self.readable) + self.push(null); + if (!server && self.stderr.readable) + self.stderr.push(null); + }).once('CHANNEL_CLOSE:' + incomingId, function() { + if (incoming.state === 'closed') + return; + incoming.state = 'closed'; + + if (self.readable) + self.push(null); + if (server && self.stderr.writable) + self.stderr.end(); + else if (!server && self.stderr.readable) + self.stderr.push(null); + + if (outgoing.state === 'open' || outgoing.state === 'eof') + self.close(); + if (outgoing.state === 'closing') + outgoing.state = 'closed'; + + delete channels[incomingId]; + + var state = self._writableState; + client._sock.removeListener('drain', ondrain); + if (!state.ending && !state.finished) + self.end(); + + // Take care of any outstanding channel requests + self._callbacks = []; + for (var i = 0; i < callbacks.length; ++i) + callbacks[i](true); + callbacks = self._callbacks; + + if (!server) { + // align more with node child processes, where the close event gets the + // same arguments as the exit event + if (!self.readable) { + if (exitCode === null) { + self.emit('close', exitCode, exitSignal, exitDump, exitDesc, + exitLang); + } else + self.emit('close', exitCode); + } else { + self.once('end', function() { + if (exitCode === null) { + self.emit('close', exitCode, exitSignal, exitDump, exitDesc, + exitLang); + } else + self.emit('close', exitCode); + }); + } + + if (!self.stderr.readable) + self.stderr.emit('close'); + else { + self.stderr.once('end', function() { + self.stderr.emit('close'); + }); + } + } else { // Server mode + if (!self.readable) + self.emit('close'); + 
else { + self.once('end', function() { + self.emit('close'); + }); + } + } + + for (var i = 0; i < CUSTOM_EVENTS_LEN; ++i) + sshstream.removeAllListeners(CUSTOM_EVENTS[i] + ':' + incomingId); + }).on('CHANNEL_DATA:' + incomingId, function(data) { + // the remote party should not be sending us data if there is no window + // space available ... + if (incoming.window === 0) + return; + + incoming.window -= data.length; + + if (!self.push(data)) { + self._waitChanDrain = true; + return; + } + + if (incoming.window <= WINDOW_THRESHOLD) + windowAdjust(self); + }).on('CHANNEL_WINDOW_ADJUST:' + incomingId, function(amt) { + // the server is allowing us to send `amt` more bytes of data + outgoing.window += amt; + + if (self._waitWindow) { + self._waitWindow = false; + if (!self._waitClientDrain) { + if (self._chunk) + self._write(self._chunk, null, self._chunkcb); + else if (self._chunkcb) + self._chunkcb(); + else if (self._chunkErr) + self.stderr._write(self._chunkErr, null, self._chunkcbErr); + else if (self._chunkcbErr) + self._chunkcbErr(); + } + } + }).on('CHANNEL_SUCCESS:' + incomingId, function() { + if (server) { + sshstream._kalast = Date.now(); + sshstream._kacnt = 0; + } else + client._resetKA(); + if (callbacks.length) + callbacks.shift()(false); + }).on('CHANNEL_FAILURE:' + incomingId, function() { + if (server) { + sshstream._kalast = Date.now(); + sshstream._kacnt = 0; + } else + client._resetKA(); + if (callbacks.length) + callbacks.shift()(true); + }).on('CHANNEL_REQUEST:' + incomingId, function(info) { + if (!server) { + if (info.request === 'exit-status') { + self.emit('exit', exitCode = info.code); + return; + } else if (info.request === 'exit-signal') { + self.emit('exit', + exitCode = null, + exitSignal = 'SIG' + info.signal, + exitDump = info.coredump, + exitDesc = info.description, + exitLang = info.lang); + return; + } + } + + // keepalive request? 
OpenSSH will send one as a channel request if there + // is a channel open + + if (info.wantReply) + sshstream.channelFailure(outgoing.id); + }); + + this.stdin = this.stdout = this; + + if (server) + this.stderr = new ServerStderr(this); + else { + this.stderr = new ReadableStream(streamOpts); + this.stderr._read = function(n) { + if (self._waitChanDrain) { + self._waitChanDrain = false; + if (incoming.window <= WINDOW_THRESHOLD) + windowAdjust(self); + } + }; + + sshstream.on('CHANNEL_EXTENDED_DATA:' + incomingId, + function(type, data) { + // the remote party should not be sending us data if there is no window + // space available ... + if (incoming.window === 0) + return; + + incoming.window -= data.length; + + if (!self.stderr.push(data)) { + self._waitChanDrain = true; + return; + } + + if (incoming.window <= WINDOW_THRESHOLD) + windowAdjust(self); + } + ); + } + + // outgoing data + this._waitClientDrain = false; // Client stream-level backpressure + this._waitWindow = false; // SSH-level backpressure + + // incoming data + this._waitChanDrain = false; // Channel Readable side backpressure + + this._chunk = undefined; + this._chunkcb = undefined; + this._chunkErr = undefined; + this._chunkcbErr = undefined; + + function onFinish() { + self.eof(); + if (server || (!server && !self.allowHalfOpen)) + self.close(); + self.writable = false; + } + this.on('finish', onFinish) + .on('prefinish', onFinish); // for node v0.11+ + function onEnd() { + self.readable = false; + } + this.on('end', onEnd) + .on('close', onEnd); +} +inherits(Channel, DuplexStream); + +Channel.prototype.eof = function() { + var ret = true; + var outgoing = this.outgoing; + + if (outgoing.state === 'open') { + outgoing.state = 'eof'; + ret = this._client._sshstream.channelEOF(outgoing.id); + } + + return ret; +}; + +Channel.prototype.close = function() { + var ret = true; + var outgoing = this.outgoing; + + if (outgoing.state === 'open' || outgoing.state === 'eof') { + outgoing.state = 
'closing'; + ret = this._client._sshstream.channelClose(outgoing.id); + } + + return ret; +}; + +Channel.prototype._read = function(n) { + if (this._waitChanDrain) { + this._waitChanDrain = false; + if (this.incoming.window <= WINDOW_THRESHOLD) + windowAdjust(this); + } +}; + +Channel.prototype._write = function(data, encoding, cb) { + var sshstream = this._client._sshstream; + var outgoing = this.outgoing; + var packetSize = outgoing.packetSize; + var id = outgoing.id; + var window = outgoing.window; + var len = data.length; + var p = 0; + var ret; + var buf; + var sliceLen; + + if (outgoing.state !== 'open') + return; + + while (len - p > 0 && window > 0) { + sliceLen = len - p; + if (sliceLen > window) + sliceLen = window; + if (sliceLen > packetSize) + sliceLen = packetSize; + + ret = sshstream.channelData(id, data.slice(p, p + sliceLen)); + + p += sliceLen; + window -= sliceLen; + + if (!ret) { + this._waitClientDrain = true; + this._chunk = undefined; + this._chunkcb = cb; + break; + } + } + + outgoing.window = window; + + if (len - p > 0) { + if (window === 0) + this._waitWindow = true; + if (p > 0) { + // partial + buf = new Buffer(len - p); + data.copy(buf, 0, p); + this._chunk = buf; + } else + this._chunk = data; + this._chunkcb = cb; + return; + } + + if (!this._waitClientDrain) + cb(); +}; + +Channel.prototype.destroy = function() { + this.end(); +}; + +// session type-specific methods +Channel.prototype.setWindow = function(rows, cols, height, width) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + if (this.type === 'session' + && (this.subtype === 'shell' || this.subtype === 'exec') + && this.writable + && this.outgoing.state === 'open') { + return this._client._sshstream.windowChange(this.outgoing.id, + rows, + cols, + height, + width); + } + + return true; +}; +Channel.prototype.signal = function(signalName) { + if (this.server) + throw new Error('Client-only method called in server mode'); + + if (this.type 
=== 'session' + && this.writable + && this.outgoing.state === 'open') + return this._client._sshstream.signal(this.outgoing.id, signalName); + + return true; +}; +Channel.prototype.exit = function(name, coreDumped, msg) { + if (!this.server) + throw new Error('Server-only method called in client mode'); + + if (this.type === 'session' + && this.writable + && this.outgoing.state === 'open') { + if (typeof name === 'number') + return this._client._sshstream.exitStatus(this.outgoing.id, name); + else { + return this._client._sshstream.exitSignal(this.outgoing.id, + name, + coreDumped, + msg); + } + } + + return true; +}; + +Channel.MAX_WINDOW = MAX_WINDOW; +Channel.PACKET_SIZE = PACKET_SIZE; + +function windowAdjust(self) { + if (self.outgoing.state !== 'open') + return true; + var amt = MAX_WINDOW - self.incoming.window; + if (amt <= 0) + return true; + self.incoming.window += amt; + return self._client._sshstream.channelWindowAdjust(self.outgoing.id, amt); +} + +function ServerStderr(channel) { + WritableStream.call(this, { highWaterMark: MAX_WINDOW }); + this._channel = channel; +} +inherits(ServerStderr, WritableStream); + +ServerStderr.prototype._write = function(data, encoding, cb) { + var channel = this._channel; + var sshstream = channel._client._sshstream; + var outgoing = channel.outgoing; + var packetSize = outgoing.packetSize; + var id = outgoing.id; + var window = outgoing.window; + var len = data.length; + var p = 0; + var ret; + var buf; + var sliceLen; + + if (channel.outgoing.state !== 'open') + return; + + while (len - p > 0 && window > 0) { + sliceLen = len - p; + if (sliceLen > window) + sliceLen = window; + if (sliceLen > packetSize) + sliceLen = packetSize; + + ret = sshstream.channelExtData(id, data.slice(p, p + sliceLen), STDERR); + + p += sliceLen; + window -= sliceLen; + + if (!ret) { + channel._waitClientDrain = true; + channel._chunkErr = undefined; + channel._chunkcbErr = cb; + break; + } + } + + outgoing.window = window; + + if (len - p > 
0) { + if (window === 0) + channel._waitWindow = true; + if (p > 0) { + // partial + buf = new Buffer(len - p); + data.copy(buf, 0, p); + channel._chunkErr = buf; + } else + channel._chunkErr = data; + channel._chunkcbErr = cb; + return; + } + + if (!channel._waitClientDrain) + cb(); +}; + +module.exports = Channel; diff --git a/reverse_engineering/node_modules/ssh2/lib/SFTPWrapper.js b/reverse_engineering/node_modules/ssh2/lib/SFTPWrapper.js new file mode 100644 index 0000000..a04a948 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/lib/SFTPWrapper.js @@ -0,0 +1,145 @@ +// This wrapper class is used to retain backwards compatibility with +// pre-v0.4 ssh2. If it weren't for `read()` and `write()` being used by the +// streams2/3 API, we could just pass the SFTPStream directly to the end user... + +var inherits = require('util').inherits, + EventEmitter = require('events').EventEmitter; + +function SFTPWrapper(stream) { + var self = this; + + EventEmitter.call(this); + + this._stream = stream; + + stream.on('error', function(err) { + self.emit('error', err); + }).on('end', function() { + self.emit('end'); + }).on('close', function() { + self.emit('close'); + }).on('continue', function() { + self.emit('continue'); + }); +} +inherits(SFTPWrapper, EventEmitter); + +// stream-related methods to pass on +SFTPWrapper.prototype.end = function() { + return this._stream.end(); +}; +// SFTPStream client methods +SFTPWrapper.prototype.createReadStream = function(path, options) { + return this._stream.createReadStream(path, options); +}; +SFTPWrapper.prototype.createWriteStream = function(path, options) { + return this._stream.createWriteStream(path, options); +}; +SFTPWrapper.prototype.open = function(path, flags, attrs, cb) { + return this._stream.open(path, flags, attrs, cb); +}; +SFTPWrapper.prototype.close = function(handle, cb) { + return this._stream.close(handle, cb); +}; +SFTPWrapper.prototype.read = function(handle, buf, off, len, position, cb) { + return 
this._stream.readData(handle, buf, off, len, position, cb); +}; +SFTPWrapper.prototype.write = function(handle, buf, off, len, position, cb) { + return this._stream.writeData(handle, buf, off, len, position, cb); +}; +SFTPWrapper.prototype.fastGet = function(remotePath, localPath, opts, cb) { + return this._stream.fastGet(remotePath, localPath, opts, cb); +}; +SFTPWrapper.prototype.fastPut = function(localPath, remotePath, opts, cb) { + return this._stream.fastPut(localPath, remotePath, opts, cb); +}; +SFTPWrapper.prototype.readFile = function(path, options, callback_) { + return this._stream.readFile(path, options, callback_); +}; +SFTPWrapper.prototype.writeFile = function(path, data, options, callback_) { + return this._stream.writeFile(path, data, options, callback_); +}; +SFTPWrapper.prototype.appendFile = function(path, data, options, callback_) { + return this._stream.appendFile(path, data, options, callback_); +}; +SFTPWrapper.prototype.exists = function(path, cb) { + return this._stream.exists(path, cb); +}; +SFTPWrapper.prototype.unlink = function(filename, cb) { + return this._stream.unlink(filename, cb); +}; +SFTPWrapper.prototype.rename = function(oldPath, newPath, cb) { + return this._stream.rename(oldPath, newPath, cb); +}; +SFTPWrapper.prototype.mkdir = function(path, attrs, cb) { + return this._stream.mkdir(path, attrs, cb); +}; +SFTPWrapper.prototype.rmdir = function(path, cb) { + return this._stream.rmdir(path, cb); +}; +SFTPWrapper.prototype.readdir = function(where, opts, cb) { + return this._stream.readdir(where, opts, cb); +}; +SFTPWrapper.prototype.fstat = function(handle, cb) { + return this._stream.fstat(handle, cb); +}; +SFTPWrapper.prototype.stat = function(path, cb) { + return this._stream.stat(path, cb); +}; +SFTPWrapper.prototype.lstat = function(path, cb) { + return this._stream.lstat(path, cb); +}; +SFTPWrapper.prototype.opendir = function(path, cb) { + return this._stream.opendir(path, cb); +}; +SFTPWrapper.prototype.setstat = 
function(path, attrs, cb) { + return this._stream.setstat(path, attrs, cb); +}; +SFTPWrapper.prototype.fsetstat = function(handle, attrs, cb) { + return this._stream.fsetstat(handle, attrs, cb); +}; +SFTPWrapper.prototype.futimes = function(handle, atime, mtime, cb) { + return this._stream.futimes(handle, atime, mtime, cb); +}; +SFTPWrapper.prototype.utimes = function(path, atime, mtime, cb) { + return this._stream.utimes(path, atime, mtime, cb); +}; +SFTPWrapper.prototype.fchown = function(handle, uid, gid, cb) { + return this._stream.fchown(handle, uid, gid, cb); +}; +SFTPWrapper.prototype.chown = function(path, uid, gid, cb) { + return this._stream.chown(path, uid, gid, cb); +}; +SFTPWrapper.prototype.fchmod = function(handle, mode, cb) { + return this._stream.fchmod(handle, mode, cb); +}; +SFTPWrapper.prototype.chmod = function(path, mode, cb) { + return this._stream.chmod(path, mode, cb); +}; +SFTPWrapper.prototype.readlink = function(path, cb) { + return this._stream.readlink(path, cb); +}; +SFTPWrapper.prototype.symlink = function(targetPath, linkPath, cb) { + return this._stream.symlink(targetPath, linkPath, cb); +}; +SFTPWrapper.prototype.realpath = function(path, cb) { + return this._stream.realpath(path, cb); +}; +// extended requests +SFTPWrapper.prototype.ext_openssh_rename = function(oldPath, newPath, cb) { + return this._stream.ext_openssh_rename(oldPath, newPath, cb); +}; +SFTPWrapper.prototype.ext_openssh_statvfs = function(path, cb) { + return this._stream.ext_openssh_statvfs(path, cb); +}; +SFTPWrapper.prototype.ext_openssh_fstatvfs = function(handle, cb) { + return this._stream.ext_openssh_fstatvfs(handle, cb); +}; +SFTPWrapper.prototype.ext_openssh_hardlink = function(oldPath, newPath, cb) { + return this._stream.ext_openssh_hardlink(oldPath, newPath, cb); +}; +SFTPWrapper.prototype.ext_openssh_fsync = function(handle, cb) { + return this._stream.ext_openssh_fsync(handle, cb); +}; + +module.exports = SFTPWrapper; diff --git 
a/reverse_engineering/node_modules/ssh2/lib/agent.js b/reverse_engineering/node_modules/ssh2/lib/agent.js new file mode 100644 index 0000000..56985f0 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/lib/agent.js @@ -0,0 +1,412 @@ +var Socket = require('net').Socket; +var EventEmitter = require('events').EventEmitter; +var inherits = require('util').inherits; +var path = require('path'); +var fs = require('fs'); +var cp = require('child_process'); + +var REQUEST_IDENTITIES = 11; +var IDENTITIES_ANSWER = 12; +var SIGN_REQUEST = 13; +var SIGN_RESPONSE = 14; +var FAILURE = 5; + +var RE_CYGWIN_SOCK = /^\!(\d+) s ([A-Z0-9]{8}\-[A-Z0-9]{8}\-[A-Z0-9]{8}\-[A-Z0-9]{8})/; + +module.exports = function(sockPath, key, keyType, data, cb) { + var sock; + var error; + var sig; + var datalen; + var keylen = 0; + var isSigning = Buffer.isBuffer(key); + var type; + var count = 0; + var siglen = 0; + var nkeys = 0; + var keys; + var comlen = 0; + var comment = false; + var accept; + var reject; + + if (typeof key === 'function' && typeof keyType === 'function') { + // agent forwarding + accept = key; + reject = keyType; + } else if (isSigning) { + keylen = key.length; + datalen = data.length; + } else { + cb = key; + key = undefined; + } + + function onconnect() { + var buf; + if (isSigning) { + /* + byte SSH2_AGENTC_SIGN_REQUEST + string key_blob + string data + uint32 flags + */ + var p = 9; + buf = new Buffer(4 + 1 + 4 + keylen + 4 + datalen + 4); + buf.writeUInt32BE(buf.length - 4, 0, true); + buf[4] = SIGN_REQUEST; + buf.writeUInt32BE(keylen, 5, true); + key.copy(buf, p); + buf.writeUInt32BE(datalen, p += keylen, true); + data.copy(buf, p += 4); + buf.writeUInt32BE(0, p += datalen, true); + sock.write(buf); + } else { + /* + byte SSH2_AGENTC_REQUEST_IDENTITIES + */ + sock.write(new Buffer([0, 0, 0, 1, REQUEST_IDENTITIES])); + } + } + function ondata(chunk) { + for (var i = 0, len = chunk.length; i < len; ++i) { + if (type === undefined) { + // skip over packet length + if 
(++count === 5) { + type = chunk[i]; + count = 0; + } + } else if (type === SIGN_RESPONSE) { + /* + byte SSH2_AGENT_SIGN_RESPONSE + string signature_blob + */ + if (!sig) { + siglen <<= 8; + siglen += chunk[i]; + if (++count === 4) { + sig = new Buffer(siglen); + count = 0; + } + } else { + sig[count] = chunk[i]; + if (++count === siglen) { + sock.removeAllListeners('data'); + return sock.destroy(); + } + } + } else if (type === IDENTITIES_ANSWER) { + /* + byte SSH2_AGENT_IDENTITIES_ANSWER + uint32 num_keys + + Followed by zero or more consecutive keys, encoded as: + + string public key blob + string public key comment + */ + if (keys === undefined) { + nkeys <<= 8; + nkeys += chunk[i]; + if (++count === 4) { + keys = new Array(nkeys); + count = 0; + if (nkeys === 0) { + sock.removeAllListeners('data'); + return sock.destroy(); + } + } + } else { + if (!key) { + keylen <<= 8; + keylen += chunk[i]; + if (++count === 4) { + key = new Buffer(keylen); + count = 0; + } + } else if (comment === false) { + key[count] = chunk[i]; + if (++count === keylen) { + keys[nkeys - 1] = key; + keylen = 0; + count = 0; + comment = true; + if (--nkeys === 0) { + key = undefined; + sock.removeAllListeners('data'); + return sock.destroy(); + } + } + } else if (comment === true) { + comlen <<= 8; + comlen += chunk[i]; + if (++count === 4) { + count = 0; + if (comlen > 0) + comment = comlen; + else { + key = undefined; + comment = false; + } + comlen = 0; + } + } else { + // skip comments + if (++count === comment) { + comment = false; + count = 0; + key = undefined; + } + } + } + } else if (type === FAILURE) { + if (isSigning) + error = new Error('Agent unable to sign data'); + else + error = new Error('Unable to retrieve list of keys from agent'); + sock.removeAllListeners('data'); + return sock.destroy(); + } + } + } + function onerror(err) { + error = err; + } + function onclose() { + if (error) + cb(error); + else if ((isSigning && !sig) || (!isSigning && !keys)) + cb(new 
Error('Unexpected disconnection from agent')); + else if (isSigning && sig) + cb(undefined, sig); + else if (!isSigning && keys) + cb(undefined, keys); + } + + if (process.platform === 'win32') { + if (sockPath === 'pageant') { + // Pageant (PuTTY authentication agent) + sock = new PageantSock(); + } else { + // cygwin ssh-agent instance + var triedCygpath = false; + fs.readFile(sockPath, function readCygsocket(err, data) { + if (err) { + if (triedCygpath) + return cb(new Error('Invalid cygwin unix socket path')); + // try using `cygpath` to convert a possible *nix-style path to the + // real Windows path before giving up ... + cp.exec('cygpath -w "' + sockPath + '"', + function(err, stdout, stderr) { + if (err || stdout.length === 0) + return cb(new Error('Invalid cygwin unix socket path')); + triedCygpath = true; + sockPath = stdout.toString().replace(/[\r\n]/g, ''); + fs.readFile(sockPath, readCygsocket); + }); + return; + } + + var m; + if (m = RE_CYGWIN_SOCK.exec(data.toString('ascii'))) { + var port; + var secret; + var secretbuf; + var state; + var bc = 0; + var isRetrying = false; + var inbuf = []; + var credsbuf = new Buffer(12); + var i; + var j; + + // use 0 for pid, uid, and gid to ensure we get an error and also + // a valid uid and gid from cygwin so that we don't have to figure it + // out ourselves + credsbuf.fill(0); + + // parse cygwin unix socket file contents + port = parseInt(m[1], 10); + secret = m[2].replace(/\-/g, ''); + secretbuf = new Buffer(16); + for (i = 0, j = 0; j < 32; ++i,j+=2) + secretbuf[i] = parseInt(secret.substring(j, j + 2), 16); + + // convert to host order (always LE for Windows) + for (i = 0; i < 16; i += 4) + secretbuf.writeUInt32LE(secretbuf.readUInt32BE(i, true), i, true); + + function _onconnect() { + bc = 0; + state = 'secret'; + sock.write(secretbuf); + } + function _ondata(data) { + bc += data.length; + if (state === 'secret') { + // the secret we sent is echoed back to us by cygwin, not sure of + // the reason for 
that, but we ignore it nonetheless ... + if (bc === 16) { + bc = 0; + state = 'creds'; + sock.write(credsbuf); + } + } else if (state === 'creds') { + // if this is the first attempt, make sure to gather the valid + // uid and gid for our next attempt + if (!isRetrying) + inbuf.push(data); + + if (bc === 12) { + sock.removeListener('connect', _onconnect); + sock.removeListener('data', _ondata); + sock.removeListener('close', _onclose); + if (isRetrying) { + addSockListeners(); + sock.emit('connect'); + } else { + isRetrying = true; + credsbuf = Buffer.concat(inbuf); + credsbuf.writeUInt32LE(process.pid, 0, true); + sock.destroy(); + tryConnect(); + } + } + } + } + function _onclose() { + cb(new Error('Problem negotiating cygwin unix socket security')); + } + function tryConnect() { + sock = new Socket(); + sock.once('connect', _onconnect); + sock.on('data', _ondata); + sock.once('close', _onclose); + sock.connect(port); + } + tryConnect(); + } else + cb(new Error('Malformed cygwin unix socket file')); + }); + return; + } + } else + sock = new Socket(); + + function addSockListeners() { + if (!accept && !reject) { + sock.once('connect', onconnect); + sock.on('data', ondata); + sock.once('error', onerror); + sock.once('close', onclose); + } else { + var chan; + sock.once('connect', function() { + chan = accept(); + var isDone = false; + function onDone() { + if (isDone) + return; + sock.destroy(); + isDone = true; + } + chan.once('end', onDone) + .once('close', onDone) + .on('data', function(data) { + sock.write(data); + }); + sock.on('data', function(data) { + chan.write(data); + }); + }); + sock.once('close', function() { + if (!chan) + reject(); + }); + } + } + addSockListeners(); + sock.connect(sockPath); +}; + + +// win32 only ------------------------------------------------------------------ +if (process.platform === 'win32') { + var RET_ERR_BADARGS = 10; + var RET_ERR_UNAVAILABLE = 11; + var RET_ERR_NOMAP = 12; + var RET_ERR_BINSTDIN = 13; + var 
RET_ERR_BINSTDOUT = 14; + var RET_ERR_BADLEN = 15; + + var ERROR = {}; + var EXEPATH = path.resolve(__dirname, '..', 'util/pagent.exe'); + ERROR[RET_ERR_BADARGS] = new Error('Invalid pagent.exe arguments'); + ERROR[RET_ERR_UNAVAILABLE] = new Error('Pageant is not running'); + ERROR[RET_ERR_NOMAP] = new Error('pagent.exe could not create an mmap'); + ERROR[RET_ERR_BINSTDIN] = new Error('pagent.exe could not set mode for stdin'); + ERROR[RET_ERR_BINSTDOUT] = new Error('pagent.exe could not set mode for stdout'); + ERROR[RET_ERR_BADLEN] = new Error('pagent.exe did not get expected input payload'); + + function PageantSock() { + this.proc = undefined; + this.buffer = null; + } + inherits(PageantSock, EventEmitter); + + PageantSock.prototype.write = function(buf) { + if (this.buffer === null) + this.buffer = buf; + else { + this.buffer = Buffer.concat([this.buffer, buf], + this.buffer.length + buf.length); + } + // Wait for at least all length bytes + if (this.buffer.length < 4) + return; + + var len = this.buffer.readUInt32BE(0, true); + // Make sure we have a full message before querying pageant + if ((this.buffer.length - 4) < len) + return; + + buf = this.buffer.slice(0, 4 + len); + if (this.buffer.length > (4 + len)) + this.buffer = this.buffer.slice(4 + len); + else + this.buffer = null; + + var self = this; + var proc; + var hadError = false; + proc = this.proc = cp.spawn(EXEPATH, [ buf.length ]); + proc.stdout.on('data', function(data) { + self.emit('data', data); + }); + proc.once('error', function(err) { + if (!hadError) { + hadError = true; + self.emit('error', err); + } + }); + proc.once('close', function(code) { + self.proc = undefined; + if (ERROR[code] && !hadError) { + hadError = true; + self.emit('error', ERROR[code]); + } + self.emit('close', hadError); + }); + proc.stdin.end(buf); + }; + PageantSock.prototype.end = PageantSock.prototype.destroy = function() { + this.buffer = null; + if (this.proc) { + this.proc.kill(); + this.proc = undefined; + } + 
}; + PageantSock.prototype.connect = function() { + this.emit('connect'); + }; +} diff --git a/reverse_engineering/node_modules/ssh2/lib/client.js b/reverse_engineering/node_modules/ssh2/lib/client.js new file mode 100644 index 0000000..1d49282 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/lib/client.js @@ -0,0 +1,1522 @@ +var crypto = require('crypto'); +var Socket = require('net').Socket; +var dnsLookup = require('dns').lookup; +var EventEmitter = require('events').EventEmitter; +var inherits = require('util').inherits; +var HASHES = crypto.getHashes(); + +var ssh2_streams = require('ssh2-streams'); +var SSH2Stream = ssh2_streams.SSH2Stream; +var SFTPStream = ssh2_streams.SFTPStream; +var consts = ssh2_streams.constants; +var BUGS = consts.BUGS; +var ALGORITHMS = consts.ALGORITHMS; +var parseKey = ssh2_streams.utils.parseKey; +var decryptKey = ssh2_streams.utils.decryptKey; +var genPublicKey = ssh2_streams.utils.genPublicKey; + +var Channel = require('./Channel'); +var agentQuery = require('./agent'); +var SFTPWrapper = require('./SFTPWrapper'); + +var MAX_CHANNEL = Math.pow(2, 32) - 1; +var RE_OPENSSH = /^OpenSSH_(?:(?![0-4])\d)|(?:\d{2,})/; +var DEBUG_NOOP = function(msg) {}; + +function Client() { + if (!(this instanceof Client)) + return new Client(); + + EventEmitter.call(this); + + this.config = { + host: undefined, + port: undefined, + forceIPv4: undefined, + forceIPv6: undefined, + keepaliveCountMax: undefined, + keepaliveInterval: undefined, + readyTimeout: undefined, + + username: undefined, + password: undefined, + privateKey: undefined, + publicKey: undefined, + tryKeyboard: undefined, + agent: undefined, + allowAgentFwd: undefined, + + hostHashAlgo: undefined, + hostHashCb: undefined, + strictVendor: undefined, + debug: undefined + }; + + this._readyTimeout = undefined; + this._channels = undefined; + this._callbacks = undefined; + this._forwarding = undefined; + this._acceptX11 = undefined; + this._agentFwdEnabled = undefined; + 
this._curChan = undefined; + this._remoteVer = undefined; + + this._sshstream = undefined; + this._sock = undefined; + this._resetKA = undefined; +} +inherits(Client, EventEmitter); + +Client.prototype.connect = function(cfg) { + var self = this; + + if (this._sock && this._sock.writable) { + this.once('close', function() { + self.connect(cfg); + }); + this.end(); + return; + } + + this.config.host = cfg.hostname || cfg.host || 'localhost'; + this.config.port = cfg.port || 22; + this.config.forceIPv4 = cfg.forceIPv4 || false; + this.config.forceIPv6 = cfg.forceIPv6 || false; + this.config.keepaliveCountMax = (typeof cfg.keepaliveCountMax === 'number' + && cfg.keepaliveCountMax >= 0 + ? cfg.keepaliveCountMax + : 3); + this.config.keepaliveInterval = (typeof cfg.keepaliveInterval === 'number' + && cfg.keepaliveInterval > 0 + ? cfg.keepaliveInterval + : 0); + this.config.readyTimeout = (typeof cfg.readyTimeout === 'number' + && cfg.readyTimeout >= 0 + ? cfg.readyTimeout + : 20000); + + var algorithms = { + kex: undefined, + kexBuf: undefined, + cipher: undefined, + cipherBuf: undefined, + serverHostKey: undefined, + serverHostKeyBuf: undefined, + hmac: undefined, + hmacBuf: undefined, + compress: undefined, + compressBuf: undefined + }; + var i; + if (typeof cfg.algorithms === 'object' && cfg.algorithms !== null) { + var algosSupported; + var algoList; + + algoList = cfg.algorithms.kex; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_KEX; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported key exchange algorithm: ' + algoList[i]); + } + algorithms.kex = algoList; + } + + algoList = cfg.algorithms.cipher; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_CIPHER; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported cipher algorithm: ' + 
algoList[i]); + } + algorithms.cipher = algoList; + } + + algoList = cfg.algorithms.serverHostKey; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_SERVER_HOST_KEY; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) { + throw new Error('Unsupported server host key algorithm: ' + + algoList[i]); + } + } + algorithms.serverHostKey = algoList; + } + + algoList = cfg.algorithms.hmac; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_HMAC; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported HMAC algorithm: ' + algoList[i]); + } + algorithms.hmac = algoList; + } + + algoList = cfg.algorithms.compress; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_COMPRESS; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported compression algorithm: ' + algoList[i]); + } + algorithms.compress = algoList; + } + } + if (algorithms.compress === undefined) { + if (cfg.compress) { + algorithms.compress = ['zlib@openssh.com', 'zlib']; + if (cfg.compress !== 'force') + algorithms.compress.push('none'); + } else if (cfg.compress === false) + algorithms.compress = ['none']; + } + + this.config.username = cfg.username || cfg.user; + this.config.password = (typeof cfg.password === 'string' + ? cfg.password + : undefined); + this.config.privateKey = (typeof cfg.privateKey === 'string' + || Buffer.isBuffer(cfg.privateKey) + ? cfg.privateKey + : undefined); + this.config.publicKey = undefined; + this.config.localHostname = (typeof cfg.localHostname === 'string' + && cfg.localHostname.length + ? cfg.localHostname + : undefined); + this.config.localUsername = (typeof cfg.localUsername === 'string' + && cfg.localUsername.length + ? 
cfg.localUsername + : undefined); + this.config.tryKeyboard = (cfg.tryKeyboard === true); + this.config.agent = (typeof cfg.agent === 'string' && cfg.agent.length + ? cfg.agent + : undefined); + this.config.allowAgentFwd = (cfg.agentForward === true + && this.config.agent !== undefined); + + this.config.strictVendor = (typeof cfg.strictVendor === 'boolean' + ? cfg.strictVendor + : true); + + var debug = this.config.debug = (typeof cfg.debug === 'function' + ? cfg.debug + : DEBUG_NOOP); + + if (typeof this.config.username !== 'string') + throw new Error('Invalid username'); + + if (cfg.agentForward === true && !this.config.allowAgentFwd) + throw new Error('You must set a valid agent path to allow agent forwarding'); + + var callbacks = this._callbacks = []; + this._channels = {}; + this._forwarding = {}; + this._acceptX11 = 0; + this._agentFwdEnabled = false; + this._curChan = -1; + this._remoteVer = undefined; + + if (this.config.privateKey) { + var privKeyInfo = parseKey(this.config.privateKey); + if (privKeyInfo instanceof Error) + throw new Error('Cannot parse privateKey: ' + privKeyInfo.message); + if (!privKeyInfo.private) + throw new Error('privateKey value does not contain a (valid) private key'); + if (privKeyInfo.encryption) { + if (typeof cfg.passphrase !== 'string') + throw new Error('Encrypted private key detected, but no passphrase given'); + decryptKey(privKeyInfo, cfg.passphrase); + } + this.config.privateKey = privKeyInfo; + this.config.publicKey = genPublicKey(privKeyInfo); + } + + var stream = this._sshstream = new SSH2Stream({ + algorithms: algorithms, + debug: (debug === DEBUG_NOOP ? 
undefined : debug) + }); + var sock = this._sock = (cfg.sock || new Socket()); + + // drain stderr if we are connection hopping using an exec stream + if (this._sock.stderr) + this._sock.stderr.resume(); + + // keepalive-related + var kainterval = this.config.keepaliveInterval; + var kacountmax = this.config.keepaliveCountMax; + var kacount = 0; + var katimer; + function sendKA() { + if (++kacount > kacountmax) { + clearInterval(katimer); + if (sock.readable) { + var err = new Error('Keepalive timeout'); + err.level = 'client-timeout'; + self.emit('error', err); + sock.destroy(); + } + return; + } + if (sock.writable) { + // append dummy callback to keep correct callback order + callbacks.push(resetKA); + stream.ping(); + } else + clearInterval(katimer); + } + function resetKA() { + if (kainterval > 0) { + kacount = 0; + clearInterval(katimer); + if (sock.writable) + katimer = setInterval(sendKA, kainterval); + } + } + this._resetKA = resetKA; + + stream.on('USERAUTH_BANNER', function(msg) { + self.emit('banner', msg); + }); + + sock.on('connect', function() { + debug('DEBUG: Client: Connected'); + self.emit('connect'); + if (!cfg.sock) + stream.pipe(sock).pipe(stream); + }).on('timeout', function() { + self.emit('timeout'); + }).on('error', function(err) { + clearTimeout(self._readyTimeout); + err.level = 'client-socket'; + self.emit('error', err); + }).on('end', function() { + stream.unpipe(sock); + clearTimeout(self._readyTimeout); + clearInterval(katimer); + self.emit('end'); + }).on('close', function() { + stream.unpipe(sock); + clearTimeout(self._readyTimeout); + clearInterval(katimer); + self.emit('close'); + + // notify outstanding channel requests of disconnection ... + var callbacks_ = callbacks; + var err = new Error('No response from server'); + callbacks = self._callbacks = []; + for (i = 0; i < callbacks_.length; ++i) + callbacks_[i](err); + + // simulate error for any channels waiting to be opened. 
this is safe + // against successfully opened channels because the success and failure + // event handlers are automatically removed when a success/failure response + // is received + var chanNos = Object.keys(self._channels); + self._channels = {}; + for (i = 0; i < chanNos.length; ++i) { + stream.emit('CHANNEL_OPEN_FAILURE:' + chanNos[i], err); + // emitting CHANNEL_CLOSE should be safe too and should help for any + // special channels which might otherwise keep the process alive, such + // as agent forwarding channels which have open unix sockets ... + stream.emit('CHANNEL_CLOSE:' + chanNos[i]); + } + }); + stream.on('drain', function() { + self.emit('drain'); + }).once('header', function(header) { + self._remoteVer = header.versions.software; + if (header.greeting) + self.emit('greeting', header.greeting); + }).on('continue', function() { + self.emit('continue'); + }).on('error', function(err) { + err.level = 'protocol'; + self.emit('error', err); + }); + + if (typeof cfg.hostVerifier === 'function') { + if (HASHES.indexOf(cfg.hostHash) === -1) + throw new Error('Invalid host hash algorithm: ' + cfg.hostHash); + var hashCb = cfg.hostVerifier; + var hasher = crypto.createHash(cfg.hostHash); + stream.once('fingerprint', function(key, verify) { + hasher.update(key); + var ret = hashCb(hasher.digest('hex'), verify); + if (ret !== undefined) + verify(ret); + }); + } + + // begin authentication handling ============================================= + var auths = []; + var curAuth; + var agentKeys; + var agentKeyPos = 0; + if (this.config.password !== undefined) + auths.push('password'); + if (this.config.publicKey !== undefined) + auths.push('publickey'); + if (this.config.agent !== undefined) + auths.push('agent'); + if (this.config.tryKeyboard) + auths.push('keyboard-interactive'); + if (this.config.publicKey !== undefined + && this.config.localHostname !== undefined + && this.config.localUsername !== undefined) + auths.push('hostbased'); + auths.push('none'); + 
function tryNextAuth() { + // TODO: better shutdown + if (!auths.length) { + stream.removeListener('USERAUTH_FAILURE', onUSERAUTH_FAILURE); + stream.removeListener('USERAUTH_PK_OK', onUSERAUTH_PK_OK); + var err = new Error('All configured authentication methods failed'); + err.level = 'client-authentication'; + self.emit('error', err); + if (stream.writable) + self.end(); + return; + } + + curAuth = auths.shift(); + switch (curAuth) { + case 'password': + stream.authPassword(self.config.username, self.config.password); + break; + case 'publickey': + stream.authPK(self.config.username, self.config.publicKey); + stream.once('USERAUTH_PK_OK', onUSERAUTH_PK_OK); + break; + case 'hostbased': + function hostbasedCb(buf, cb) { + var algo; + switch (self.config.privateKey.fulltype) { + case 'ssh-rsa': + algo = 'RSA-SHA1'; + break; + case 'ssh-dss': + algo = 'DSA-SHA1'; + break; + case 'ecdsa-sha2-nistp256': + algo = 'sha256'; + break; + case 'ecdsa-sha2-nistp384': + algo = 'sha384'; + break; + case 'ecdsa-sha2-nistp521': + algo = 'sha512'; + break; + } + var signature = crypto.createSign(algo); + signature.update(buf); + signature = trySign(signature, self.config.privateKey.privateOrig); + if (signature instanceof Error) { + signature.message = 'Error while signing data with privateKey: ' + + signature.message; + signature.level = 'client-authentication'; + self.emit('error', signature); + return tryNextAuth(); + } + + cb(signature); + } + stream.authHostbased(self.config.username, + self.config.publicKey, + self.config.localHostname, + self.config.localUsername, + hostbasedCb); + break; + case 'agent': + agentQuery(self.config.agent, function(err, keys) { + if (err) { + err.level = 'agent'; + self.emit('error', err); + agentKeys = undefined; + return tryNextAuth(); + } else if (keys.length === 0) { + debug('DEBUG: Agent: No keys stored in agent'); + agentKeys = undefined; + return tryNextAuth(); + } + + agentKeys = keys; + agentKeyPos = 0; + + 
stream.authPK(self.config.username, keys[0]); + stream.once('USERAUTH_PK_OK', onUSERAUTH_PK_OK); + }); + break; + case 'keyboard-interactive': + stream.authKeyboard(self.config.username); + stream.on('USERAUTH_INFO_REQUEST', onUSERAUTH_INFO_REQUEST); + break; + case 'none': + stream.authNone(self.config.username); + break; + } + } + function tryNextAgentKey() { + if (curAuth === 'agent') { + if (agentKeyPos >= agentKeys.length) + return; + if (++agentKeyPos >= agentKeys.length) { + debug('DEBUG: Agent: No more keys left to try'); + debug('DEBUG: Client: agent auth failed'); + agentKeys = undefined; + tryNextAuth(); + } else { + debug('DEBUG: Agent: Trying key #' + (agentKeyPos + 1)); + stream.authPK(self.config.username, agentKeys[agentKeyPos]); + stream.once('USERAUTH_PK_OK', onUSERAUTH_PK_OK); + } + } + } + function onUSERAUTH_INFO_REQUEST(name, instructions, lang, prompts) { + var nprompts = (Array.isArray(prompts) ? prompts.length : 0); + if (nprompts === 0) { + debug('DEBUG: Client: Sending automatic USERAUTH_INFO_RESPONSE'); + return stream.authInfoRes(); + } + // we sent a keyboard-interactive user authentication request and now the + // server is sending us the prompts we need to present to the user + self.emit('keyboard-interactive', + name, + instructions, + lang, + prompts, + function(answers) { + stream.authInfoRes(answers); + }); + } + function onUSERAUTH_PK_OK() { + if (curAuth === 'agent') { + var agentKey = agentKeys[agentKeyPos]; + var keyLen = agentKey.readUInt32BE(0, true); + var pubKeyFullType = agentKey.toString('ascii', 4, 4 + keyLen); + var pubKeyType = pubKeyFullType.slice(4); + // Check that we support the key type first + switch (pubKeyFullType) { + case 'ssh-rsa': + case 'ssh-dss': + case 'ecdsa-sha2-nistp256': + case 'ecdsa-sha2-nistp384': + case 'ecdsa-sha2-nistp521': + break; + default: + debug('DEBUG: Agent: Skipping unsupported key type: ' + + pubKeyFullType); + return tryNextAgentKey(); + } + stream.authPK(self.config.username, + 
agentKey, + function(buf, cb) { + agentQuery(self.config.agent, + agentKey, + pubKeyType, + buf, + function(err, signed) { + if (err) { + err.level = 'agent'; + self.emit('error', err); + } else { + var sigFullTypeLen = signed.readUInt32BE(0, true); + if (4 + sigFullTypeLen + 4 < signed.length) { + var sigFullType = signed.toString('ascii', 4, 4 + sigFullTypeLen); + if (sigFullType !== pubKeyFullType) { + err = new Error('Agent key/signature type mismatch'); + err.level = 'agent'; + self.emit('error', err); + } else { + // skip algoLen + algo + sigLen + return cb(signed.slice(4 + sigFullTypeLen + 4)); + } + } + } + + tryNextAgentKey(); + }); + }); + } else if (curAuth === 'publickey') { + stream.authPK(self.config.username, + self.config.publicKey, + function(buf, cb) { + var algo; + switch (self.config.privateKey.fulltype) { + case 'ssh-rsa': + algo = 'RSA-SHA1'; + break; + case 'ssh-dss': + algo = 'DSA-SHA1'; + break; + case 'ecdsa-sha2-nistp256': + algo = 'sha256'; + break; + case 'ecdsa-sha2-nistp384': + algo = 'sha384'; + break; + case 'ecdsa-sha2-nistp521': + algo = 'sha512'; + break; + } + var signature = crypto.createSign(algo); + signature.update(buf); + signature = trySign(signature, self.config.privateKey.privateOrig); + if (signature instanceof Error) { + signature.message = 'Error while signing data with privateKey: ' + + signature.message; + signature.level = 'client-authentication'; + self.emit('error', signature); + return tryNextAuth(); + } + cb(signature); + }); + } + } + function onUSERAUTH_FAILURE(authsLeft, partial) { + stream.removeListener('USERAUTH_PK_OK', onUSERAUTH_PK_OK); + stream.removeListener('USERAUTH_INFO_REQUEST', onUSERAUTH_INFO_REQUEST); + if (curAuth === 'agent') { + debug('DEBUG: Client: Agent key #' + (agentKeyPos + 1) + ' failed'); + return tryNextAgentKey(); + } else + debug('DEBUG: Client: ' + curAuth + ' auth failed'); + + tryNextAuth(); + } + stream.once('USERAUTH_SUCCESS', function() { + auths = undefined; + 
stream.removeListener('USERAUTH_FAILURE', onUSERAUTH_FAILURE); + stream.removeListener('USERAUTH_INFO_REQUEST', onUSERAUTH_INFO_REQUEST); + /*if (self.config.agent && self._agentKeys) + self._agentKeys = undefined;*/ + + // start keepalive mechanism + resetKA(); + + clearTimeout(self._readyTimeout); + + self.emit('ready'); + }).on('USERAUTH_FAILURE', onUSERAUTH_FAILURE); + // end authentication handling =============================================== + + // handle initial handshake completion + stream.once('ready', function() { + stream.service('ssh-userauth'); + stream.once('SERVICE_ACCEPT', function(svcName) { + if (svcName === 'ssh-userauth') + tryNextAuth(); + }); + }); + + // handle incoming requests from server, typically a forwarded TCP or X11 + // connection + stream.on('CHANNEL_OPEN', function(info) { + onCHANNEL_OPEN(self, info); + }); + + // handle responses for tcpip-forward and other global requests + stream.on('REQUEST_SUCCESS', function(data) { + if (callbacks.length) + callbacks.shift()(false, data); + }).on('REQUEST_FAILURE', function() { + if (callbacks.length) + callbacks.shift()(true); + }); + + stream.on('GLOBAL_REQUEST', function(name, wantReply, data) { + // auto-reject all global requests, this can be especially useful if the + // server is sending us dummy keepalive global requests + if (wantReply) + stream.requestFailure(); + }); + + if (!cfg.sock) { + var host = this.config.host; + var forceIPv4 = this.config.forceIPv4; + var forceIPv6 = this.config.forceIPv6; + + debug('DEBUG: Client: Trying ' + + host + + ' on port ' + + this.config.port + + ' ...'); + + function doConnect() { + startTimeout(); + self._sock.connect(self.config.port, host); + self._sock.setNoDelay(true); + self._sock.setMaxListeners(0); + self._sock.setTimeout(typeof cfg.timeout === 'number' ? cfg.timeout : 0); + } + + if ((!forceIPv4 && !forceIPv6) || (forceIPv4 && forceIPv6)) + doConnect(); + else { + dnsLookup(host, (forceIPv4 ? 
4 : 6), function(err, address, family) { + if (err) { + var error = new Error('Error while looking up ' + + (forceIPv4 ? 'IPv4' : 'IPv6') + + ' address for host ' + + host + + ': ' + err); + clearTimeout(self._readyTimeout); + error.level = 'client-dns'; + self.emit('error', error); + self.emit('close'); + return; + } + host = address; + doConnect(); + }); + } + } else { + startTimeout(); + stream.pipe(sock).pipe(stream); + } + + function startTimeout() { + if (self.config.readyTimeout > 0) { + self._readyTimeout = setTimeout(function() { + var err = new Error('Timed out while waiting for handshake'); + err.level = 'client-timeout'; + self.emit('error', err); + sock.destroy(); + }, self.config.readyTimeout); + } + } +}; + +Client.prototype.end = function() { + if (this._sock + && this._sock.writable + && this._sshstream + && this._sshstream.writable) + return this._sshstream.disconnect(); + return false; +}; + +Client.prototype.destroy = function() { + this._sock && this._sock.destroy(); +}; + +Client.prototype.exec = function(cmd, opts, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + if (typeof opts === 'function') { + cb = opts; + opts = {}; + } + + var self = this; + var extraOpts = { allowHalfOpen: (opts.allowHalfOpen !== false) }; + + return openChannel(this, 'session', extraOpts, function(err, chan) { + if (err) + return cb(err); + + var todo = []; + + function reqCb(err) { + if (err) { + chan.close(); + return cb(err); + } + if (todo.length) + todo.shift()(); + } + + if (self.config.allowAgentFwd === true + || (opts + && opts.agentForward === true + && self.config.agent !== undefined)) { + todo.push(function() { + reqAgentFwd(chan, reqCb); + }); + } + + if (typeof opts === 'object') { + if (typeof opts.env === 'object') + reqEnv(chan, opts.env); + if (typeof opts.pty === 'object' || opts.pty === true) + todo.push(function() { reqPty(chan, opts.pty, reqCb); }); + 
if (typeof opts.x11 === 'object' + || opts.x11 === 'number' + || opts.x11 === true) + todo.push(function() { reqX11(chan, opts.x11, reqCb); }); + } + + todo.push(function() { reqExec(chan, cmd, opts, cb); }); + todo.shift()(); + }); +}; + +Client.prototype.shell = function(wndopts, opts, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + // start an interactive terminal/shell session + var self = this; + + if (typeof wndopts === 'function') { + cb = wndopts; + wndopts = opts = undefined; + } else if (typeof opts === 'function') { + cb = opts; + opts = undefined; + } + if (wndopts && wndopts.x11 !== undefined) { + opts = wndopts; + wndopts = undefined; + } + + return openChannel(this, 'session', function(err, chan) { + if (err) + return cb(err); + + var todo = []; + + function reqCb(err) { + if (err) { + chan.close(); + return cb(err); + } + if (todo.length) + todo.shift()(); + } + + if (self.config.allowAgentFwd === true + || (opts + && opts.agentForward === true + && self.config.agent !== undefined)) { + todo.push(function() { + reqAgentFwd(chan, reqCb); + }); + } + + if (wndopts !== false) + todo.push(function() { reqPty(chan, wndopts, reqCb); }); + + if (typeof opts === 'object') { + if (typeof opts.x11 === 'object' + || opts.x11 === 'number' + || opts.x11 === true) + todo.push(function() { reqX11(chan, opts.x11, reqCb); }); + } + + todo.push(function() { reqShell(chan, cb); }); + todo.shift()(); + }); +}; + +Client.prototype.subsys = function(name, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + return openChannel(this, 'session', function(err, chan) { + if (err) + return cb(err); + + reqSubsystem(chan, name, function(err, stream) { + if (err) + return cb(err); + + cb(undefined, stream); + }); + }); +}; + +Client.prototype.sftp = function(cb) { + if (!this._sock + || 
!this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + var self = this; + + // start an SFTP session + return openChannel(this, 'session', function(err, chan) { + if (err) + return cb(err); + + reqSubsystem(chan, 'sftp', function(err, stream) { + if (err) + return cb(err); + + var serverIdentRaw = self._sshstream._state.incoming.identRaw; + var cfg = { debug: self.config.debug }; + var sftp = new SFTPStream(cfg, serverIdentRaw); + + function onError(err) { + sftp.removeListener('ready', onReady); + stream.removeListener('exit', onExit); + cb(err); + } + + function onReady() { + sftp.removeListener('error', onError); + stream.removeListener('exit', onExit); + cb(undefined, new SFTPWrapper(sftp)); + } + + function onExit(code, signal) { + sftp.removeListener('ready', onReady); + sftp.removeListener('error', onError); + var msg; + if (typeof code === 'number') { + msg = 'Received exit code ' + + code + + ' while establishing SFTP session'; + } else { + msg = 'Received signal ' + + signal + + ' while establishing SFTP session'; + } + var err = new Error(msg); + err.code = code; + err.signal = signal; + cb(err); + } + + sftp.once('error', onError) + .once('ready', onReady) + .once('close', function() { + stream.end(); + }); + + // OpenSSH server sends an exit-status if there was a problem spinning up + // an sftp server child process, so we listen for that here in order to + // properly raise an error. 
+ stream.once('exit', onExit); + + sftp.pipe(stream).pipe(sftp); + }); + }); +}; + +Client.prototype.forwardIn = function(bindAddr, bindPort, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + // send a request for the server to start forwarding TCP connections to us + // on a particular address and port + + var self = this; + var wantReply = (typeof cb === 'function'); + + if (wantReply) { + this._callbacks.push(function(had_err, data) { + if (had_err) { + return cb(had_err !== true + ? had_err + : new Error('Unable to bind to ' + bindAddr + ':' + bindPort)); + } + + var realPort = bindPort; + if (bindPort === 0 && data && data.length >= 4) { + realPort = data.readUInt32BE(0, true); + if (!(self._sshstream.remoteBugs & BUGS.DYN_RPORT_BUG)) + bindPort = realPort; + } + + self._forwarding[bindAddr + ':' + bindPort] = realPort; + + cb(undefined, realPort); + }); + } + + return this._sshstream.tcpipForward(bindAddr, bindPort, wantReply); +}; + +Client.prototype.unforwardIn = function(bindAddr, bindPort, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + // send a request to stop forwarding us new connections for a particular + // address and port + + var self = this; + var wantReply = (typeof cb === 'function'); + + if (wantReply) { + this._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? 
had_err + : new Error('Unable to unbind from ' + + bindAddr + ':' + bindPort)); + } + + delete self._forwarding[bindAddr + ':' + bindPort]; + + cb(); + }); + } + + return this._sshstream.cancelTcpipForward(bindAddr, bindPort, wantReply); +}; + +Client.prototype.forwardOut = function(srcIP, srcPort, dstIP, dstPort, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + // send a request to forward a TCP connection to the server + + var cfg = { + srcIP: srcIP, + srcPort: srcPort, + dstIP: dstIP, + dstPort: dstPort + }; + + return openChannel(this, 'direct-tcpip', cfg, cb); +}; + +Client.prototype.openssh_noMoreSessions = function(cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + var wantReply = (typeof cb === 'function'); + + if (!this.config.strictVendor + || (this.config.strictVendor && RE_OPENSSH.test(this._remoteVer))) { + if (wantReply) { + this._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? had_err + : new Error('Unable to disable future sessions')); + } + + cb(); + }); + } + + return this._sshstream.openssh_noMoreSessions(wantReply); + } else if (wantReply) { + process.nextTick(function() { + cb(new Error('strictVendor enabled and server is not OpenSSH or compatible version')); + }); + } + + return true; +}; + +Client.prototype.openssh_forwardInStreamLocal = function(socketPath, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + var wantReply = (typeof cb === 'function'); + + if (!this.config.strictVendor + || (this.config.strictVendor && RE_OPENSSH.test(this._remoteVer))) { + if (wantReply) { + this._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? 
had_err + : new Error('Unable to bind to ' + socketPath)); + } + + cb(); + }); + } + + return this._sshstream.openssh_streamLocalForward(socketPath, wantReply); + } else if (wantReply) { + process.nextTick(function() { + cb(new Error('strictVendor enabled and server is not OpenSSH or compatible version')); + }); + } + + return true; +}; + +Client.prototype.openssh_unforwardInStreamLocal = function(socketPath, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + var wantReply = (typeof cb === 'function'); + + if (!this.config.strictVendor + || (this.config.strictVendor && RE_OPENSSH.test(this._remoteVer))) { + if (wantReply) { + this._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? had_err + : new Error('Unable to unbind on ' + socketPath)); + } + + cb(); + }); + } + + return this._sshstream.openssh_cancelStreamLocalForward(socketPath, wantReply); + } else if (wantReply) { + process.nextTick(function() { + cb(new Error('strictVendor enabled and server is not OpenSSH or compatible version')); + }); + } + + return true; +}; + +Client.prototype.openssh_forwardOutStreamLocal = function(socketPath, cb) { + if (!this._sock + || !this._sock.writable + || !this._sshstream + || !this._sshstream.writable) + throw new Error('Not connected'); + + if (!this.config.strictVendor + || (this.config.strictVendor && RE_OPENSSH.test(this._remoteVer))) { + var cfg = { socketPath: socketPath }; + return openChannel(this, 'direct-streamlocal@openssh.com', cfg, cb); + } else { + process.nextTick(function() { + cb(new Error('strictVendor enabled and server is not OpenSSH or compatible version')); + }); + } + + return true; +}; + +function openChannel(self, type, opts, cb) { + // ask the server to open a channel for some purpose + // (e.g. 
session (sftp, exec, shell), or forwarding a TCP connection + var localChan = nextChannel(self); + var initWindow = Channel.MAX_WINDOW; + var maxPacket = Channel.PACKET_SIZE; + var ret = true; + + if (localChan === false) + return cb(new Error('No free channels available')); + + if (typeof opts === 'function') { + cb = opts; + opts = {}; + } + + self._channels[localChan] = true; + + var sshstream = self._sshstream; + sshstream.once('CHANNEL_OPEN_CONFIRMATION:' + localChan, onSuccess) + .once('CHANNEL_OPEN_FAILURE:' + localChan, onFailure) + .once('CHANNEL_CLOSE:' + localChan, onFailure); + + if (type === 'session') + ret = sshstream.session(localChan, initWindow, maxPacket); + else if (type === 'direct-tcpip') + ret = sshstream.directTcpip(localChan, initWindow, maxPacket, opts); + else if (type === 'direct-streamlocal@openssh.com') { + ret = sshstream.openssh_directStreamLocal(localChan, + initWindow, + maxPacket, + opts); + } + + return ret; + + function onSuccess(info) { + sshstream.removeListener('CHANNEL_OPEN_FAILURE:' + localChan, onFailure); + sshstream.removeListener('CHANNEL_CLOSE:' + localChan, onFailure); + + var chaninfo = { + type: type, + incoming: { + id: localChan, + window: initWindow, + packetSize: maxPacket, + state: 'open' + }, + outgoing: { + id: info.sender, + window: info.window, + packetSize: info.packetSize, + state: 'open' + } + }; + cb(undefined, new Channel(chaninfo, self)); + } + + function onFailure(info) { + sshstream.removeListener('CHANNEL_OPEN_CONFIRMATION:' + localChan, + onSuccess); + sshstream.removeListener('CHANNEL_OPEN_FAILURE:' + localChan, onFailure); + sshstream.removeListener('CHANNEL_CLOSE:' + localChan, onFailure); + + delete self._channels[localChan]; + + var err; + if (info instanceof Error) + err = info; + else if (typeof info === 'object' && info !== null) { + err = new Error('(SSH) Channel open failure: ' + info.description); + err.reason = info.reason; + err.lang = info.lang; + } else { + err = new Error('(SSH) 
Channel open failure: ' + + 'server closed channel unexpectedly'); + err.reason = err.lang = ''; + } + cb(err); + } +} + +function nextChannel(self) { + // get the next available channel number + + // optimized path + if (self._curChan < MAX_CHANNEL) + return ++self._curChan; + + // slower lookup path + for (var i = 0, channels = self._channels; i < MAX_CHANNEL; ++i) + if (!channels[i]) + return i; + + return false; +} + +function reqX11(chan, screen, cb) { + // asks server to start sending us X11 connections + var cfg = { + single: false, + protocol: 'MIT-MAGIC-COOKIE-1', + cookie: crypto.randomBytes(16).toString('hex'), + screen: (typeof screen === 'number' ? screen : 0) + }; + + if (typeof screen === 'function') + cb = screen; + else if (typeof screen === 'object') { + if (typeof screen.single === 'boolean') + cfg.single = screen.single; + if (typeof screen.screen === 'number') + cfg.screen = screen.screen; + } + + var wantReply = (typeof cb === 'function'); + + if (chan.outgoing.state !== 'open') { + wantReply && cb(new Error('Channel is not open')); + return true; + } + + if (wantReply) { + chan._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? 
had_err + : new Error('Unable to request X11')); + } + + chan._hasX11 = true; + ++chan._client._acceptX11; + chan.once('close', function() { + if (chan._client._acceptX11) + --chan._client._acceptX11; + }); + + cb(); + }); + } + + return chan._client._sshstream.x11Forward(chan.outgoing.id, cfg, wantReply); +} + +function reqPty(chan, opts, cb) { + var rows = 24; + var cols = 80; + var width = 640; + var height = 480; + var term = 'vt100'; + + if (typeof opts === 'function') + cb = opts; + else if (typeof opts === 'object') { + if (typeof opts.rows === 'number') + rows = opts.rows; + if (typeof opts.cols === 'number') + cols = opts.cols; + if (typeof opts.width === 'number') + width = opts.width; + if (typeof opts.height === 'number') + height = opts.height; + if (typeof opts.term === 'string') + term = opts.term; + } + + var wantReply = (typeof cb === 'function'); + + if (chan.outgoing.state !== 'open') { + wantReply && cb(new Error('Channel is not open')); + return true; + } + + if (wantReply) { + chan._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? had_err + : new Error('Unable to request a pseudo-terminal')); + } + cb(); + }); + } + + return chan._client._sshstream.pty(chan.outgoing.id, + rows, + cols, + height, + width, + term, + null, + wantReply); +} + +function reqAgentFwd(chan, cb) { + var wantReply = (typeof cb === 'function'); + + if (chan.outgoing.state !== 'open') { + wantReply && cb(new Error('Channel is not open')); + return true; + } else if (chan._client._agentFwdEnabled) { + wantReply && cb(false); + return true; + } + + chan._client._agentFwdEnabled = true; + + chan._callbacks.push(function(had_err) { + if (had_err) { + chan._client._agentFwdEnabled = false; + wantReply && cb(had_err !== true + ? 
had_err + : new Error('Unable to request agent forwarding')); + return; + } + + wantReply && cb(); + }); + + return chan._client._sshstream.openssh_agentForward(chan.outgoing.id, true); +} + +function reqShell(chan, cb) { + if (chan.outgoing.state !== 'open') { + cb(new Error('Channel is not open')); + return true; + } + chan._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? had_err + : new Error('Unable to open shell')); + } + chan.subtype = 'shell'; + cb(undefined, chan); + }); + + return chan._client._sshstream.shell(chan.outgoing.id, true); +} + +function reqExec(chan, cmd, opts, cb) { + if (chan.outgoing.state !== 'open') { + cb(new Error('Channel is not open')); + return true; + } + chan._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? had_err + : new Error('Unable to exec')); + } + chan.subtype = 'exec'; + chan.allowHalfOpen = (opts.allowHalfOpen !== false); + cb(undefined, chan); + }); + + return chan._client._sshstream.exec(chan.outgoing.id, cmd, true); +} + +function reqEnv(chan, env) { + if (chan.outgoing.state !== 'open') + return true; + var ret = true; + var keys = Object.keys(env || {}); + var key; + var val; + + for (var i = 0, len = keys.length; i < len; ++i) { + key = keys[i]; + val = env[key]; + ret = chan._client._sshstream.env(chan.outgoing.id, key, val, false); + } + + return ret; +} + +function reqSubsystem(chan, name, cb) { + if (chan.outgoing.state !== 'open') { + cb(new Error('Channel is not open')); + return true; + } + chan._callbacks.push(function(had_err) { + if (had_err) { + return cb(had_err !== true + ? 
had_err + : new Error('Unable to start subsystem: ' + name)); + } + chan.subtype = 'subsystem'; + cb(undefined, chan); + }); + + return chan._client._sshstream.subsystem(chan.outgoing.id, name, true); +} + +function onCHANNEL_OPEN(self, info) { + // the server is trying to open a channel with us, this is usually when + // we asked the server to forward us connections on some port and now they + // are asking us to accept/deny an incoming connection on their side + + var localChan = false; + var reason; + + function accept() { + var chaninfo = { + type: info.type, + incoming: { + id: localChan, + window: Channel.MAX_WINDOW, + packetSize: Channel.PACKET_SIZE, + state: 'open' + }, + outgoing: { + id: info.sender, + window: info.window, + packetSize: info.packetSize, + state: 'open' + } + }; + var stream = new Channel(chaninfo, self); + + self._sshstream.channelOpenConfirm(info.sender, + localChan, + Channel.MAX_WINDOW, + Channel.PACKET_SIZE); + return stream; + } + function reject() { + if (reason === undefined) { + if (localChan === false) + reason = consts.CHANNEL_OPEN_FAILURE.RESOURCE_SHORTAGE; + else + reason = consts.CHANNEL_OPEN_FAILURE.CONNECT_FAILED; + } + + self._sshstream.channelOpenFail(info.sender, reason, '', ''); + } + + if (info.type === 'forwarded-tcpip' + || info.type === 'x11' + || info.type === 'auth-agent@openssh.com') { + // check for conditions for automatic rejection + var rejectConn = ((info.type === 'forwarded-tcpip' + && self._forwarding[info.data.destIP + + ':' + + info.data.destPort] === undefined) + || (info.type === 'x11' && self._acceptX11 === 0) + || (info.type === 'auth-agent@openssh.com' + && !self._agentFwdEnabled)); + if (!rejectConn) { + localChan = nextChannel(self); + + if (localChan === false) { + self.config.debug('DEBUG: Client: Automatic rejection of incoming channel open: no channels available'); + rejectConn = true; + } else + self._channels[localChan] = true; + } else { + reason = 
consts.CHANNEL_OPEN_FAILURE.ADMINISTRATIVELY_PROHIBITED; + self.config.debug('DEBUG: Client: Automatic rejection of incoming channel open: unexpected channel open for: ' + + info.type); + } + + // TODO: automatic rejection after some timeout? + + if (rejectConn) + reject(); + + if (localChan !== false) { + if (info.type === 'forwarded-tcpip') { + if (info.data.destPort === 0) { + info.data.destPort = self._forwarding[info.data.destIP + + ':' + + info.data.destPort]; + } + self.emit('tcp connection', info.data, accept, reject); + } else if (info.type === 'x11') { + self.emit('x11', info.data, accept, reject); + } else { + agentQuery(self.config.agent, accept, reject); + } + } + } else { + // automatically reject any unsupported channel open requests + self.config.debug('DEBUG: Client: Automatic rejection of incoming channel open: unsupported type: ' + + info.type); + reason = consts.CHANNEL_OPEN_FAILURE.UNKNOWN_CHANNEL_TYPE; + reject(); + } +} + +function trySign(sig, key) { + try { + return sig.sign(key); + } catch (err) { + return err; + } +} + +Client.Client = Client; +Client.Server = require('./server'); +// pass some useful utilities on to end user (e.g. 
parseKey(), genPublicKey()) +Client.utils = ssh2_streams.utils; +// expose useful SFTPStream constants for sftp server usage +Client.SFTP_STATUS_CODE = SFTPStream.STATUS_CODE; +Client.SFTP_OPEN_MODE = SFTPStream.OPEN_MODE; + +module.exports = Client; // backwards compatibility diff --git a/reverse_engineering/node_modules/ssh2/lib/keepalivemgr.js b/reverse_engineering/node_modules/ssh2/lib/keepalivemgr.js new file mode 100644 index 0000000..28572ab --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/lib/keepalivemgr.js @@ -0,0 +1,76 @@ +var spliceOne = require('./utils').spliceOne; + +function Manager(interval, streamInterval, kaCountMax) { + var streams = this._streams = []; + this._timer = undefined; + this._timerInterval = interval; + this._timerfn = function() { + var now = Date.now(); + for (var i = 0, len = streams.length, s, last; i < len; ++i) { + s = streams[i]; + last = s._kalast; + if (last && (now - last) >= streamInterval) { + if (++s._kacnt > kaCountMax) { + var err = new Error('Keepalive timeout'); + err.level = 'client-timeout'; + s.emit('error', err); + s.disconnect(); + spliceOne(streams, i); + --i; + len = streams.length; + } else { + s._kalast = now; + // XXX: if the server ever starts sending real global requests to the + // client, we will need to add a dummy callback here to keep the + // correct reply order + s.ping(); + } + } + } + }; +} + +Manager.prototype.start = function() { + if (this._timer) + this.stop(); + this._timer = setInterval(this._timerfn, this._timerInterval); +}; + +Manager.prototype.stop = function() { + if (this._timer) { + clearInterval(this._timer); + this._timer = undefined; + } +}; + +Manager.prototype.add = function(stream) { + var streams = this._streams, + self = this; + + stream.once('end', function() { + self.remove(stream); + }).on('packet', resetKA); + + streams[streams.length] = stream; + + resetKA(); + + if (!this._timer) + this.start(); + + function resetKA() { + stream._kalast = Date.now(); + 
stream._kacnt = 0; + } +}; + +Manager.prototype.remove = function(stream) { + var streams = this._streams, + index = streams.indexOf(stream); + if (index > -1) + spliceOne(streams, index); + if (!streams.length) + this.stop(); +}; + +module.exports = Manager; diff --git a/reverse_engineering/node_modules/ssh2/lib/server.js b/reverse_engineering/node_modules/ssh2/lib/server.js new file mode 100644 index 0000000..6d359d0 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/lib/server.js @@ -0,0 +1,1156 @@ +var net = require('net'); +var EventEmitter = require('events').EventEmitter; +var listenerCount = EventEmitter.listenerCount; +var inherits = require('util').inherits; + +var ssh2_streams = require('ssh2-streams'); +var parseKey = ssh2_streams.utils.parseKey; +var genPublicKey = ssh2_streams.utils.genPublicKey; +var decryptKey = ssh2_streams.utils.decryptKey; +var SSH2Stream = ssh2_streams.SSH2Stream; +var SFTPStream = ssh2_streams.SFTPStream; +var consts = ssh2_streams.constants; +var DISCONNECT_REASON = consts.DISCONNECT_REASON; +var CHANNEL_OPEN_FAILURE = consts.CHANNEL_OPEN_FAILURE; +var ALGORITHMS = consts.ALGORITHMS; + +var Channel = require('./Channel'); +var KeepaliveManager = require('./keepalivemgr'); + +var MAX_CHANNEL = Math.pow(2, 32) - 1; +var MAX_PENDING_AUTHS = 10; + +var kaMgr; + +function Server(cfg, listener) { + if (!(this instanceof Server)) + return new Server(cfg, listener); + + var hostKeys = { + 'ssh-rsa': null, + 'ssh-dss': null, + 'ecdsa-sha2-nistp256': null, + 'ecdsa-sha2-nistp384': null, + 'ecdsa-sha2-nistp521': null + }; + + var hostKeys_ = cfg.hostKeys; + if (!Array.isArray(hostKeys_)) + throw new Error('hostKeys must be an array'); + + var i; + for (i = 0; i < hostKeys_.length; ++i) { + var privateKey; + if (Buffer.isBuffer(hostKeys_[i]) || typeof hostKeys_[i] === 'string') + privateKey = parseKey(hostKeys_[i]); + else + privateKey = parseKey(hostKeys_[i].key); + if (privateKey instanceof Error) + throw new Error('Cannot parse 
privateKey: ' + privateKey.message); + if (!privateKey.private) + throw new Error('privateKey value contains an invalid private key'); + if (hostKeys[privateKey.fulltype]) + continue; + if (privateKey.encryption) { + if (typeof hostKeys_[i].passphrase !== 'string') + throw new Error('Missing passphrase for encrypted private key'); + decryptKey(privateKey, hostKeys_[i].passphrase); + } + hostKeys[privateKey.fulltype] = { + privateKey: privateKey, + publicKey: genPublicKey(privateKey) + }; + } + + var algorithms = { + kex: undefined, + kexBuf: undefined, + cipher: undefined, + cipherBuf: undefined, + serverHostKey: undefined, + serverHostKeyBuf: undefined, + hmac: undefined, + hmacBuf: undefined, + compress: undefined, + compressBuf: undefined + }; + if (typeof cfg.algorithms === 'object' && cfg.algorithms !== null) { + var algosSupported; + var algoList; + + algoList = cfg.algorithms.kex; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_KEX; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported key exchange algorithm: ' + algoList[i]); + } + algorithms.kex = algoList; + } + + algoList = cfg.algorithms.cipher; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_CIPHER; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported cipher algorithm: ' + algoList[i]); + } + algorithms.cipher = algoList; + } + + algoList = cfg.algorithms.serverHostKey; + var copied = false; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_SERVER_HOST_KEY; + for (i = algoList.length - 1; i >= 0; --i) { + if (algosSupported.indexOf(algoList[i]) === -1) { + throw new Error('Unsupported server host key algorithm: ' + + algoList[i]); + } + if (!hostKeys[algoList[i]]) { + // Silently discard for now + if (!copied) { + algoList = 
algoList.slice(); + copied = true; + } + algoList.splice(i, 1); + } + } + if (algoList.length > 0) + algorithms.serverHostKey = algoList; + } + + algoList = cfg.algorithms.hmac; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_HMAC; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported HMAC algorithm: ' + algoList[i]); + } + algorithms.hmac = algoList; + } + + algoList = cfg.algorithms.compress; + if (Array.isArray(algoList) && algoList.length > 0) { + algosSupported = ALGORITHMS.SUPPORTED_COMPRESS; + for (i = 0; i < algoList.length; ++i) { + if (algosSupported.indexOf(algoList[i]) === -1) + throw new Error('Unsupported compression algorithm: ' + algoList[i]); + } + algorithms.compress = algoList; + } + } + + // Make sure we at least have some kind of valid list of support key + // formats + if (algorithms.serverHostKey === undefined) { + var hostKeyAlgos = Object.keys(hostKeys); + for (i = hostKeyAlgos.length - 1; i >= 0; --i) { + if (!hostKeys[hostKeyAlgos[i]]) + hostKeyAlgos.splice(i, 1); + } + algorithms.serverHostKey = hostKeyAlgos; + } + + if (!kaMgr + && Server.KEEPALIVE_INTERVAL > 0 + && Server.KEEPALIVE_CLIENT_INTERVAL > 0 + && Server.KEEPALIVE_CLIENT_COUNT_MAX >= 0) { + kaMgr = new KeepaliveManager(Server.KEEPALIVE_INTERVAL, + Server.KEEPALIVE_CLIENT_INTERVAL, + Server.KEEPALIVE_CLIENT_COUNT_MAX); + } + + var self = this; + + EventEmitter.call(this); + + if (typeof listener === 'function') + self.on('connection', listener); + + var streamcfg = { + algorithms: algorithms, + hostKeys: hostKeys, + server: true + }; + var keys; + var len; + for (i = 0, keys = Object.keys(cfg), len = keys.length; i < len; ++i) { + var key = keys[i]; + if (key === 'privateKey' + || key === 'publicKey' + || key === 'passphrase' + || key === 'algorithms' + || key === 'hostKeys' + || key === 'server') { + continue; + } + streamcfg[key] = cfg[key]; + } + + if (typeof 
streamcfg.debug === 'function') { + var oldDebug = streamcfg.debug; + var cfgKeys = Object.keys(streamcfg); + } + + this._srv = new net.Server(function(socket) { + if (self._connections >= self.maxConnections) { + socket.destroy(); + return; + } + ++self._connections; + socket.once('close', function(had_err) { + --self._connections; + + // since joyent/node#993bb93e0a, we have to "read past EOF" in order to + // get an `end` event on streams. thankfully adding this does not + // negatively affect node versions pre-joyent/node#993bb93e0a. + sshstream.read(); + }).on('error', function(err) { + sshstream.reset(); + sshstream.emit('error', err); + }); + + var conncfg = streamcfg; + + // prepend debug output with a unique identifier in case there are multiple + // clients connected at the same time + if (oldDebug) { + conncfg = {}; + for (var i = 0, key; i < cfgKeys.length; ++i) { + key = cfgKeys[i]; + conncfg[key] = streamcfg[key]; + } + var debugPrefix = '[' + process.hrtime().join('.') + '] '; + conncfg.debug = function(msg) { + oldDebug(debugPrefix + msg); + }; + } + + var sshstream = new SSH2Stream(conncfg); + var client = new Client(sshstream, socket); + + socket.pipe(sshstream).pipe(socket); + + // silence pre-header errors + function onClientPreHeaderError(err) {} + client.on('error', onClientPreHeaderError); + + sshstream.once('header', function(header) { + if (sshstream._readableState.ended) { + // already disconnected internally in SSH2Stream due to incompatible + // protocol version + return; + } else if (!listenerCount(self, 'connection')) { + // auto reject + return sshstream.disconnect(DISCONNECT_REASON.BY_APPLICATION); + } + + client.removeListener('error', onClientPreHeaderError); + + self.emit('connection', + client, + { ip: socket.remoteAddress, header: header }); + }); + }).on('error', function(err) { + self.emit('error', err); + }).on('listening', function() { + self.emit('listening'); + }).on('close', function() { + self.emit('close'); + }); + 
this._connections = 0; + this.maxConnections = Infinity; +} +inherits(Server, EventEmitter); + +Server.prototype.listen = function() { + this._srv.listen.apply(this._srv, arguments); + return this; +}; + +Server.prototype.address = function() { + return this._srv.address(); +}; + +Server.prototype.getConnections = function(cb) { + this._srv.getConnections(cb); +}; + +Server.prototype.close = function(cb) { + this._srv.close(cb); + return this; +}; + +Server.prototype.ref = function() { + this._srv.ref(); +}; + +Server.prototype.unref = function() { + this._srv.unref(); +}; + + +function Client(stream, socket) { + EventEmitter.call(this); + + var self = this; + + this._sshstream = stream; + var channels = this._channels = {}; + this._curChan = -1; + this._sock = socket; + this.noMoreSessions = false; + this.authenticated = false; + + stream.on('end', function() { + self.emit('end'); + }).on('close', function(hasErr) { + self.emit('close', hasErr); + }).on('error', function(err) { + self.emit('error', err); + }).on('drain', function() { + self.emit('drain'); + }).on('continue', function() { + self.emit('continue'); + }); + + var exchanges = 0; + var acceptedAuthSvc = false; + var pendingAuths = []; + var authCtx; + + // begin service/auth-related ================================================ + stream.on('SERVICE_REQUEST', function(service) { + if (exchanges === 0 + || acceptedAuthSvc + || self.authenticated + || service !== 'ssh-userauth') + return stream.disconnect(DISCONNECT_REASON.SERVICE_NOT_AVAILABLE); + + acceptedAuthSvc = true; + stream.serviceAccept(service); + }).on('USERAUTH_REQUEST', onUSERAUTH_REQUEST); + function onUSERAUTH_REQUEST(username, service, method, methodData) { + if (exchanges === 0 + || (authCtx + && (authCtx.username !== username || authCtx.service !== service)) + // TODO: support hostbased auth + || (method !== 'password' + && method !== 'publickey' + && method !== 'hostbased' + && method !== 'keyboard-interactive' + && method !== 
'none') + || pendingAuths.length === MAX_PENDING_AUTHS) + return stream.disconnect(DISCONNECT_REASON.PROTOCOL_ERROR); + else if (service !== 'ssh-connection') + return stream.disconnect(DISCONNECT_REASON.SERVICE_NOT_AVAILABLE); + + // XXX: this really shouldn't be reaching into private state ... + stream._state.authMethod = method; + + var ctx; + if (method === 'keyboard-interactive') { + ctx = new KeyboardAuthContext(stream, username, service, method, + methodData, onAuthDecide); + } else if (method === 'publickey') { + ctx = new PKAuthContext(stream, username, service, method, methodData, + onAuthDecide); + } else if (method === 'hostbased') { + ctx = new HostbasedAuthContext(stream, username, service, method, + methodData, onAuthDecide); + } else if (method === 'password') { + ctx = new PwdAuthContext(stream, username, service, method, methodData, + onAuthDecide); + } else if (method === 'none') + ctx = new AuthContext(stream, username, service, method, onAuthDecide); + + if (authCtx) { + if (!authCtx._initialResponse) + return pendingAuths.push(ctx); + else if (authCtx._multistep && !this._finalResponse) { + // RFC 4252 says to silently abort the current auth request if a new + // auth request comes in before the final response from an auth method + // that requires additional request/response exchanges -- this means + // keyboard-interactive for now ... 
+ authCtx._cleanup && authCtx._cleanup(); + authCtx.emit('abort'); + } + } + + authCtx = ctx; + + if (listenerCount(self, 'authentication')) + self.emit('authentication', authCtx); + else + authCtx.reject(); + } + function onAuthDecide(ctx, allowed, methodsLeft, isPartial) { + if (authCtx === ctx && !self.authenticated) { + if (allowed) { + stream.removeListener('USERAUTH_REQUEST', onUSERAUTH_REQUEST); + authCtx = undefined; + self.authenticated = true; + stream.authSuccess(); + pendingAuths = []; + self.emit('ready'); + } else { + stream.authFailure(methodsLeft, isPartial); + if (pendingAuths.length) { + authCtx = pendingAuths.pop(); + if (listenerCount(self, 'authentication')) + self.emit('authentication', authCtx); + else + authCtx.reject(); + } + } + } + } + // end service/auth-related ================================================== + + var unsentGlobalRequestsReplies = []; + + function sendReplies() { + var reply; + while (unsentGlobalRequestsReplies.length > 0 + && unsentGlobalRequestsReplies[0].type) { + reply = unsentGlobalRequestsReplies.shift(); + if (reply.type === 'SUCCESS') + stream.requestSuccess(reply.buf); + if (reply.type === 'FAILURE') + stream.requestFailure(); + } + } + + stream.on('GLOBAL_REQUEST', function(name, wantReply, data) { + var reply = { + type: null, + buf: null + }; + + function setReply(type, buf) { + reply.type = type; + reply.buf = buf; + sendReplies(); + } + + if (wantReply) + unsentGlobalRequestsReplies.push(reply); + + if ((name === 'tcpip-forward' + || name === 'cancel-tcpip-forward' + || name === 'no-more-sessions@openssh.com' + || name === 'streamlocal-forward@openssh.com' + || name === 'cancel-streamlocal-forward@openssh.com') + && listenerCount(self, 'request') + && self.authenticated) { + var accept; + var reject; + + if (wantReply) { + var replied = false; + accept = function(chosenPort) { + if (replied) + return; + replied = true; + var bufPort; + if (name === 'tcpip-forward' + && data.bindPort === 0 + && typeof 
chosenPort === 'number') { + bufPort = new Buffer(4); + bufPort.writeUInt32BE(chosenPort, 0, true); + } + setReply('SUCCESS', bufPort); + }; + reject = function() { + if (replied) + return; + replied = true; + setReply('FAILURE'); + }; + } + + if (name === 'no-more-sessions@openssh.com') { + self.noMoreSessions = true; + accept && accept(); + return; + } + + self.emit('request', accept, reject, name, data); + } else if (wantReply) + setReply('FAILURE'); + }); + + stream.on('CHANNEL_OPEN', function(info) { + // do early reject in some cases to prevent wasteful channel allocation + if ((info.type === 'session' && self.noMoreSessions) + || !self.authenticated) { + var reasonCode = CHANNEL_OPEN_FAILURE.ADMINISTRATIVELY_PROHIBITED; + return stream.channelOpenFail(info.sender, reasonCode); + } + + var localChan = nextChannel(self); + var accept; + var reject; + var replied = false; + if (localChan === false) { + // auto-reject due to no channels available + return stream.channelOpenFail(info.sender, + CHANNEL_OPEN_FAILURE.RESOURCE_SHORTAGE); + } + + // be optimistic, reserve channel to prevent another request from trying to + // take the same channel + channels[localChan] = true; + + reject = function() { + if (replied) + return; + + replied = true; + + delete channels[localChan]; + + var reasonCode = CHANNEL_OPEN_FAILURE.ADMINISTRATIVELY_PROHIBITED; + return stream.channelOpenFail(info.sender, reasonCode); + }; + + switch (info.type) { + case 'session': + if (listenerCount(self, 'session')) { + accept = function() { + if (replied) + return; + + replied = true; + + stream.channelOpenConfirm(info.sender, + localChan, + Channel.MAX_WINDOW, + Channel.PACKET_SIZE); + + return new Session(self, info, localChan); + }; + + self.emit('session', accept, reject); + } else + reject(); + break; + case 'direct-tcpip': + if (listenerCount(self, 'tcpip')) { + accept = function() { + if (replied) + return; + + replied = true; + + stream.channelOpenConfirm(info.sender, + localChan, + 
Channel.MAX_WINDOW, + Channel.PACKET_SIZE); + + var chaninfo = { + type: undefined, + incoming: { + id: localChan, + window: Channel.MAX_WINDOW, + packetSize: Channel.PACKET_SIZE, + state: 'open' + }, + outgoing: { + id: info.sender, + window: info.window, + packetSize: info.packetSize, + state: 'open' + } + }; + + return new Channel(chaninfo, self); + }; + + self.emit('tcpip', accept, reject, info.data); + } else + reject(); + break; + case 'direct-streamlocal@openssh.com': + if (listenerCount(self, 'openssh.streamlocal')) { + accept = function() { + if (replied) + return; + + replied = true; + + stream.channelOpenConfirm(info.sender, + localChan, + Channel.MAX_WINDOW, + Channel.PACKET_SIZE); + + var chaninfo = { + type: undefined, + incoming: { + id: localChan, + window: Channel.MAX_WINDOW, + packetSize: Channel.PACKET_SIZE, + state: 'open' + }, + outgoing: { + id: info.sender, + window: info.window, + packetSize: info.packetSize, + state: 'open' + } + }; + + return new Channel(chaninfo, self); + }; + + self.emit('openssh.streamlocal', accept, reject, info.data); + } else + reject(); + break; + default: + // auto-reject unsupported channel types + reject(); + } + }); + + stream.on('NEWKEYS', function() { + if (++exchanges > 1) + self.emit('rekey'); + }); + + if (kaMgr) { + this.once('ready', function() { + kaMgr.add(stream); + }); + } +} +inherits(Client, EventEmitter); + +Client.prototype.end = function() { + return this._sshstream.disconnect(DISCONNECT_REASON.BY_APPLICATION); +}; + +Client.prototype.x11 = function(originAddr, originPort, cb) { + var opts = { + originAddr: originAddr, + originPort: originPort + }; + return openChannel(this, 'x11', opts, cb); +}; + +Client.prototype.forwardOut = function(boundAddr, boundPort, remoteAddr, + remotePort, cb) { + var opts = { + boundAddr: boundAddr, + boundPort: boundPort, + remoteAddr: remoteAddr, + remotePort: remotePort + }; + return openChannel(this, 'forwarded-tcpip', opts, cb); +}; + 
+Client.prototype.openssh_forwardOutStreamLocal = function(socketPath, cb) { + var opts = { + socketPath: socketPath + }; + return openChannel(this, 'forwarded-streamlocal@openssh.com', opts, cb); +}; + +Client.prototype.rekey = function(cb) { + var stream = this._sshstream; + var ret = true; + var error; + + try { + ret = stream.rekey(); + } catch (ex) { + error = ex; + } + + // TODO: re-throw error if no callback? + + if (typeof cb === 'function') { + if (error) { + process.nextTick(function() { + cb(error); + }); + } else + this.once('rekey', cb); + } + + return ret; +}; + +function Session(client, info, localChan) { + this.subtype = undefined; + + var ending = false; + var self = this; + var outgoingId = info.sender; + var channel; + + var chaninfo = { + type: 'session', + incoming: { + id: localChan, + window: Channel.MAX_WINDOW, + packetSize: Channel.PACKET_SIZE, + state: 'open' + }, + outgoing: { + id: info.sender, + window: info.window, + packetSize: info.packetSize, + state: 'open' + } + }; + + function onREQUEST(info) { + var replied = false; + var accept; + var reject; + + if (info.wantReply) { + // "real session" requests will have custom accept behaviors + if (info.request !== 'shell' + && info.request !== 'exec' + && info.request !== 'subsystem') { + accept = function() { + if (replied || ending || channel) + return; + + replied = true; + + return client._sshstream.channelSuccess(outgoingId); + }; + } + + reject = function() { + if (replied || ending || channel) + return; + + replied = true; + + return client._sshstream.channelFailure(outgoingId); + }; + } + + if (ending) { + reject && reject(); + return; + } + + switch (info.request) { + // "pre-real session start" requests + case 'env': + if (listenerCount(self, 'env')) { + self.emit('env', accept, reject, { + key: info.key, + val: info.val + }); + } else + reject && reject(); + break; + case 'pty-req': + if (listenerCount(self, 'pty')) { + self.emit('pty', accept, reject, { + cols: info.cols, + 
rows: info.rows, + width: info.width, + height: info.height, + term: info.term, + modes: info.modes, + }); + } else + reject && reject(); + break; + case 'window-change': + if (listenerCount(self, 'window-change')) { + self.emit('window-change', accept, reject, { + cols: info.cols, + rows: info.rows, + width: info.width, + height: info.height + }); + } else + reject && reject(); + break; + case 'x11-req': + if (listenerCount(self, 'x11')) { + self.emit('x11', accept, reject, { + single: info.single, + protocol: info.protocol, + cookie: info.cookie, + screen: info.screen + }); + } else + reject && reject(); + break; + // "post-real session start" requests + case 'signal': + if (listenerCount(self, 'signal')) { + self.emit('signal', accept, reject, { + name: info.signal + }); + } else + reject && reject(); + break; + // XXX: is `auth-agent-req@openssh.com` really "post-real session start"? + case 'auth-agent-req@openssh.com': + if (listenerCount(self, 'auth-agent')) + self.emit('auth-agent', accept, reject); + else + reject && reject(); + break; + // "real session start" requests + case 'shell': + if (listenerCount(self, 'shell')) { + accept = function() { + if (replied || ending || channel) + return; + + replied = true; + + if (info.wantReply) + client._sshstream.channelSuccess(outgoingId); + + channel = new Channel(chaninfo, client, { server: true }); + + channel.subtype = self.subtype = info.request; + + return channel; + }; + + self.emit('shell', accept, reject); + } else + reject && reject(); + break; + case 'exec': + if (listenerCount(self, 'exec')) { + accept = function() { + if (replied || ending || channel) + return; + + replied = true; + + if (info.wantReply) + client._sshstream.channelSuccess(outgoingId); + + channel = new Channel(chaninfo, client, { server: true }); + + channel.subtype = self.subtype = info.request; + + return channel; + }; + + self.emit('exec', accept, reject, { + command: info.command + }); + } else + reject && reject(); + break; + case 
'subsystem': + accept = function() { + if (replied || ending || channel) + return; + + replied = true; + + if (info.wantReply) + client._sshstream.channelSuccess(outgoingId); + + channel = new Channel(chaninfo, client, { server: true }); + + channel.subtype = self.subtype = (info.request + ':' + info.subsystem); + + if (info.subsystem === 'sftp') { + var sftp = new SFTPStream({ + server: true, + debug: client._sshstream.debug + }); + channel.pipe(sftp).pipe(channel); + + return sftp; + } else + return channel; + }; + + if (info.subsystem === 'sftp' && listenerCount(self, 'sftp')) + self.emit('sftp', accept, reject); + else if (info.subsystem !== 'sftp' && listenerCount(self, 'subsystem')) { + self.emit('subsystem', accept, reject, { + name: info.subsystem + }); + } else + reject && reject(); + break; + default: + reject && reject(); + } + } + function onEOF() { + ending = true; + self.emit('eof'); + self.emit('end'); + } + function onCLOSE() { + ending = true; + self.emit('close'); + } + client._sshstream + .on('CHANNEL_REQUEST:' + localChan, onREQUEST) + .once('CHANNEL_EOF:' + localChan, onEOF) + .once('CHANNEL_CLOSE:' + localChan, onCLOSE); +} +inherits(Session, EventEmitter); + + +function AuthContext(stream, username, service, method, cb) { + EventEmitter.call(this); + + var self = this; + + this.username = this.user = username; + this.service = service; + this.method = method; + this._initialResponse = false; + this._finalResponse = false; + this._multistep = false; + this._cbfinal = function(allowed, methodsLeft, isPartial) { + if (!self._finalResponse) { + self._finalResponse = true; + cb(self, allowed, methodsLeft, isPartial); + } + }; + this._stream = stream; +} +inherits(AuthContext, EventEmitter); +AuthContext.prototype.accept = function() { + this._cleanup && this._cleanup(); + this._initialResponse = true; + this._cbfinal(true); +}; +AuthContext.prototype.reject = function(methodsLeft, isPartial) { + this._cleanup && this._cleanup(); + 
this._initialResponse = true; + this._cbfinal(false, methodsLeft, isPartial); +}; + +var RE_KBINT_SUBMETHODS = /[ \t\r\n]*,[ \t\r\n]*/g; +function KeyboardAuthContext(stream, username, service, method, submethods, cb) { + AuthContext.call(this, stream, username, service, method, cb); + this._multistep = true; + + var self = this; + + this._cb = undefined; + this._onInfoResponse = function(responses) { + if (self._cb) { + var callback = self._cb; + self._cb = undefined; + callback(responses); + } + }; + this.submethods = submethods.split(RE_KBINT_SUBMETHODS); + this.on('abort', function() { + self._cb && self._cb(new Error('Authentication request aborted')); + }); +} +inherits(KeyboardAuthContext, AuthContext); +KeyboardAuthContext.prototype._cleanup = function() { + this._stream.removeListener('USERAUTH_INFO_RESPONSE', this._onInfoResponse); +}; +KeyboardAuthContext.prototype.prompt = function(prompts, title, instructions, + cb) { + if (!Array.isArray(prompts)) + prompts = [ prompts ]; + + if (typeof title === 'function') { + cb = title; + title = instructions = undefined; + } else if (typeof instructions === 'function') { + cb = instructions; + instructions = undefined; + } + + for (var i = 0; i < prompts.length; ++i) { + if (typeof prompts[i] === 'string') { + prompts[i] = { + prompt: prompts[i], + echo: true + }; + } + } + + this._cb = cb; + this._initialResponse = true; + this._stream.once('USERAUTH_INFO_RESPONSE', this._onInfoResponse); + + return this._stream.authInfoReq(title, instructions, prompts); +}; + +function PKAuthContext(stream, username, service, method, pkInfo, cb) { + AuthContext.call(this, stream, username, service, method, cb); + + this.key = { algo: pkInfo.keyAlgo, data: pkInfo.key }; + this.signature = pkInfo.signature; + var sigAlgo; + if (this.signature) { + switch (pkInfo.keyAlgo) { + case 'ssh-rsa': + sigAlgo = 'RSA-SHA1'; + break; + case 'ssh-dss': + sigAlgo = 'DSA-SHA1'; + break; + case 'ecdsa-sha2-nistp256': + sigAlgo = 'sha256'; + 
break; + case 'ecdsa-sha2-nistp384': + sigAlgo = 'sha384'; + break; + case 'ecdsa-sha2-nistp521': + sigAlgo = 'sha512'; + break; + } + } + this.sigAlgo = sigAlgo; + this.blob = pkInfo.blob; +} +inherits(PKAuthContext, AuthContext); +PKAuthContext.prototype.accept = function() { + if (!this.signature) { + this._initialResponse = true; + this._stream.authPKOK(this.key.algo, this.key.data); + } else + AuthContext.prototype.accept.call(this); +}; + +function HostbasedAuthContext(stream, username, service, method, pkInfo, cb) { + AuthContext.call(this, stream, username, service, method, cb); + + this.key = { algo: pkInfo.keyAlgo, data: pkInfo.key }; + this.signature = pkInfo.signature; + var sigAlgo; + if (this.signature) { + switch (pkInfo.keyAlgo) { + case 'ssh-rsa': + sigAlgo = 'RSA-SHA1'; + break; + case 'ssh-dss': + sigAlgo = 'DSA-SHA1'; + break; + case 'ecdsa-sha2-nistp256': + sigAlgo = 'sha256'; + break; + case 'ecdsa-sha2-nistp384': + sigAlgo = 'sha384'; + break; + case 'ecdsa-sha2-nistp521': + sigAlgo = 'sha512'; + break; + } + } + this.sigAlgo = sigAlgo; + this.blob = pkInfo.blob; + this.localHostname = pkInfo.localHostname; + this.localUsername = pkInfo.localUsername; +} +inherits(HostbasedAuthContext, AuthContext); + +function PwdAuthContext(stream, username, service, method, password, cb) { + AuthContext.call(this, stream, username, service, method, cb); + + this.password = password; +} +inherits(PwdAuthContext, AuthContext); + + +function openChannel(self, type, opts, cb) { + // ask the client to open a channel for some purpose + // (e.g. 
a forwarded TCP connection) + var localChan = nextChannel(self); + var initWindow = Channel.MAX_WINDOW; + var maxPacket = Channel.PACKET_SIZE; + var ret = true; + + if (localChan === false) + return cb(new Error('No free channels available')); + + if (typeof opts === 'function') { + cb = opts; + opts = {}; + } + + self._channels[localChan] = true; + + var sshstream = self._sshstream; + sshstream.once('CHANNEL_OPEN_CONFIRMATION:' + localChan, function(info) { + sshstream.removeAllListeners('CHANNEL_OPEN_FAILURE:' + localChan); + + var chaninfo = { + type: type, + incoming: { + id: localChan, + window: initWindow, + packetSize: maxPacket, + state: 'open' + }, + outgoing: { + id: info.sender, + window: info.window, + packetSize: info.packetSize, + state: 'open' + } + }; + cb(undefined, new Channel(chaninfo, self, { server: true })); + }).once('CHANNEL_OPEN_FAILURE:' + localChan, function(info) { + sshstream.removeAllListeners('CHANNEL_OPEN_CONFIRMATION:' + localChan); + + delete self._channels[localChan]; + + var err = new Error('(SSH) Channel open failure: ' + info.description); + err.reason = info.reason; + err.lang = info.lang; + cb(err); + }); + + if (type === 'forwarded-tcpip') + ret = sshstream.forwardedTcpip(localChan, initWindow, maxPacket, opts); + else if (type === 'x11') + ret = sshstream.x11(localChan, initWindow, maxPacket, opts); + else if (type === 'forwarded-streamlocal@openssh.com') { + ret = sshstream.openssh_forwardedStreamLocal(localChan, + initWindow, + maxPacket, + opts); + } + + return ret; +} + +function nextChannel(self) { + // get the next available channel number + + // fast path + if (self._curChan < MAX_CHANNEL) + return ++self._curChan; + + // slower lookup path + for (var i = 0, channels = self._channels; i < MAX_CHANNEL; ++i) + if (!channels[i]) + return i; + + return false; +} + + +Server.createServer = function(cfg, listener) { + return new Server(cfg, listener); +}; +Server.KEEPALIVE_INTERVAL = 1000; +Server.KEEPALIVE_CLIENT_INTERVAL 
= 15000; +Server.KEEPALIVE_CLIENT_COUNT_MAX = 3; + +module.exports = Server; diff --git a/reverse_engineering/node_modules/ssh2/lib/utils.js b/reverse_engineering/node_modules/ssh2/lib/utils.js new file mode 100644 index 0000000..f513b95 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/lib/utils.js @@ -0,0 +1,5 @@ +exports.spliceOne = function(list, index) { + for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1) + list[i] = list[k]; + list.pop(); +}; diff --git a/reverse_engineering/node_modules/ssh2/package.json b/reverse_engineering/node_modules/ssh2/package.json new file mode 100644 index 0000000..e3a3f4d --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/package.json @@ -0,0 +1,74 @@ +{ + "_args": [ + [ + "ssh2@0.5.4", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "ssh2@0.5.4", + "_id": "ssh2@0.5.4", + "_inBundle": false, + "_integrity": "sha1-G/a2soyW6u8mf01sRqWiUXpZnic=", + "_location": "/ssh2", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "ssh2@0.5.4", + "name": "ssh2", + "escapedName": "ssh2", + "rawSpec": "0.5.4", + "saveSpec": null, + "fetchSpec": "0.5.4" + }, + "_requiredBy": [ + "/tunnel-ssh" + ], + "_resolved": "https://registry.npmjs.org/ssh2/-/ssh2-0.5.4.tgz", + "_spec": "0.5.4", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "Brian White", + "email": "mscdex@mscdex.net" + }, + "bugs": { + "url": "https://github.com/mscdex/ssh2/issues" + }, + "dependencies": { + "ssh2-streams": "~0.1.15" + }, + "description": "SSH2 client and server modules written in pure JavaScript for node.js", + "devDependencies": { + "semver": "^5.1.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "homepage": "https://github.com/mscdex/ssh2#readme", + "keywords": [ + "ssh", + "ssh2", + "sftp", + "secure", + "shell", + "exec", + "remote", + "client" + ], + "licenses": [ + { + "type": "MIT", + "url": 
"http://github.com/mscdex/ssh2/raw/master/LICENSE" + } + ], + "main": "./lib/client", + "name": "ssh2", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/mscdex/ssh2.git" + }, + "scripts": { + "test": "node test/test.js" + }, + "version": "0.5.4" +} diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/bad_rsa_private_key b/reverse_engineering/node_modules/ssh2/test/fixtures/bad_rsa_private_key new file mode 100644 index 0000000..80fdc87 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/bad_rsa_private_key @@ -0,0 +1,26 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpQIBAAKCAQEAz7MF4vhgw6HxNf3KtVf3VULTYgrRSlv+cCZdB1xxI1p/nGyu +/eekUn5C+mGeDS488DX5ulzicxVpL7pamc/tFNcp91MrR7PiIMK2l+bwbZJubbLj +DHhNcBklnFOSKxtmQRfuorGakpy/kXmIxF5of0xXGns6DlHRq9dGCJIXvrkqhcEb +k4n2y4aV4VOiMHdo6FrFQVPzA8DlbJP2SjIFZ/0VdK7O7eiyiqV1p1xlbTQQ5rAX +LdsshBn/GvoBOTCVupMXurn2582vgGh26Mmovj2QGzScMGUVttkMlnxUmKT/aQka +mC0vR54QOW7lyWPjAitOV0qgmtGm3/cl7W7NjwIDAQABAoIBAFxH0C+951BEXWV9 +s1jLEqshG8YNxFtjcDLn+KFSoznv9Y7MgxtwlgPI8X1Jbe2xQ4X+lUwGBN7Y/nkk +NSjtxwphZtXqb+pVs/yWRoZLJzunucSnnFVoBg/uPFWuk9zvOYlmVrKWcnT9i+fY +tbl5sLgOdQzg/zRpidztssIQFti3o2jnpyrEGcepPWLkfCgqPfGmNv78BAIt/6iT +zYDB4GMSq/LnPTIOFsIOvlkZg3RCcLWeAPRC+lvFQVY+M/uJL5WIbA5il1IMMKH7 +MULWpRO3lnb1JVrkZlBldK5uew6AN3tHDQOmg+C2JuIbOZ35J9dcnwsE+IptWWBj +XiFRJCECgYEA8BeuufkslureqOycaPLMkqchMTue1OxbLJFvPN+dh/cW6Lng3b8+ +xAyzZrc0vccH/jl9WVHhIZ7TcKXDzSmmrtnZ/3m1c4gANGqIPwO+emL1ZzzkIKGd +FrLeBZKP4TWry9kjg4cG1SKGpcB5ngJMPXUxMZNe74tC4Hk820PkFjcCgYEA3XXn +ngRCgH9N1eKSD2daxxlBhTTSnTgjU+dDaDFQzPIhJCcS8HwyQBQmNTOSXXK9sShC +fdXAsmiBby5WEBq/K5+cXeDG2ZlFLyPovEgTUrLgraw42PYs0+A8Ls7dFk7PuMez +3G2gUPkY039JiyXKfcog9/dIRfbWCwzQ6s7TV2kCgYEArsme81cahhgg1zvCNokk +M1Omz2/HFt2nFpAeOmPVDGnu7Kh9sxGKgTF53bpclBh0kjiKL99zFYXKCoUzQYYk +CcEhemLBnYUSGRbBb5arMfAfFfR3Y+YkNaUsC0SCqILpOfMvbo57g+ipu7ufDlA/ +7rIFiUDvaVap7j909W+8egsCgYEAsuc/0DBixMmSyHl7QwRcmkC15HVSu32RVIOb 
+ub01KAtmaH1EWJAMTCW64/mggOtjgI0kgeE/BSFVhsqo7eOdkhEj0db27OxbroRU +zF1xdrpYtRRO7D6a4iLgm3OzuQS72+tASo8pFqDUxG6sq8NAvLOgRJE4ioSoT07w +KvAgXRkCgYEAmWgcsX/BdNcKOteSDtPkys5NRtWCBz7Coxb+xXXoXz1FVegBolpY +wXVePvXTIbU8VJOLunMyH5wpmMUiJbTX9v2o/yfpsH0ci4GaAeVtqpA= +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/id_dsa b/reverse_engineering/node_modules/ssh2/test/fixtures/id_dsa new file mode 100644 index 0000000..d9c9b5b --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/id_dsa @@ -0,0 +1,12 @@ +-----BEGIN DSA PRIVATE KEY----- +MIIBuwIBAAKBgQC3/2VIGHgqHuxvhPa6rryqqLy6sQmjeSIwyrIW5F/o8W4sz/mE +0noDSW4PaoXjgPQv5egj1EByws6dMOUqLaZHNWNn+Lh/jkKlwKyhbSCAjqoWH3v3 +uI1j58GO/eZ2+REijfyA0XJxdm7kqEexxbg0UpFr1F/eLBUxpLIbhhS1cwIVAKcB +B9DnAObuPJGTwYTCaIIBQDy9AoGAJicW0pIFwgoTYsIeywmUQopJ3FQ4M3eDwQ0U +T33pzWvBZFN2OsUDTFg64PNm9ow09wk042qMg168eKCUTp2iR/Y9R4xTj8dls8iv +aMGMZ/B32eURIjUREGiXYTyG1pfuB2znSvr/5pavhuz5yG9M0AJCiYiexdaQKO3N +oJp6T3ACgYEAsep79p4WljnawrJc928zGq6dLYjs+5apYhqx4vf2l3Z2u26VqVNG +i5zZkUzhWQYV3/qtEOpO43dyZTHW+d9L8ni6HbXFWRVx60WE+5WKkzkimHJ6gox2 +kDvOqPudiS34KJOCEYYLEnJmK8aUZBZzWFORXkN8QgA/h9ts8AU785UCFAVXZMWq +CteWCH2HzcY2x/65dMwL +-----END DSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/id_ecdsa b/reverse_engineering/node_modules/ssh2/test/fixtures/id_ecdsa new file mode 100644 index 0000000..036e3b6 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/id_ecdsa @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIPMZuWP7fMsZeyC1XXVUALVebJOX7PTwmsPql9qG25SeoAoGCCqGSM49 +AwEHoUQDQgAEB/B6mC5lrekKPWfGEkKpnCk08+dRnzFUg2jUHpaIrOTt4jGdvq6T +yAN57asB+PYmFyVIpi35NcmicF18qX3ayg== +-----END EC PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa b/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa new file mode 100644 index 0000000..90a6f72 --- /dev/null +++ 
b/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQDL0yFO4W4xbdrJk/i+CW3itPATvhRkS+x+gKmkdH739AqWYP6r +kTFAmFTw9gLJ/c2tN7ow0T0QUR9iUsv/3QzTuwsjBu0feo3CVxwMkaJTo5ks9XBo +OW0R3tyCcOLlAcQ1WjC7cv5Ifn4gXLLM+k8/y/m3u8ERtidNxbRqpQ/gPQIDAQAB +AoGABirSRC/ABNDdIOJQUXe5knWFGiPTPCGr+zvrZiV8PgZtV5WBvzE6e0jgsRXQ +icobMhWQla+PGHJL786vi4NlwuhwKcF7Pd908ofej1eeBOd1u/HQ/qsfxPdxI0zF +dcWPYgAOo9ydOMGcSx4v1zDIgFInELJzKbv64LJQD0/xhoUCQQD7KhJ7M8Nkwsr2 +iKCyWTFM2M8/VKltgaiSmsNKZETashk5tKOrM3EWX4RcB/DnvHe8VNyYpC6Sd1uQ +AHwPDfxDAkEAz7+7hDybH6Cfvmr8kUOlDXiJJWXp5lP37FLzMDU6a9wTKZFnh57F +e91zRmKlQTegFet93MXaFYljRkI+4lMpfwJBAPPLbNEF973Qjq4rBMDZbs9HDDRO ++35+AqD7dGC7X1Jg2bd3rf66GiU7ZgDm/GIUQK0gOlg31bT6AniO39zFGH0CQFBh +Yd9HR8nT7xrQ8EoQPzNYGNBUf0xz3rAcZCWZ4rHK48sojEMoBkbnputrzX7PU+xH +QlqCXuAIWVXc2dHd1WcCQQDIUJHPOsgeAfTLoRRRURp/m8zZ9IpbaPTyDstPVNYe +zARW3Oa/tzPqdO6NWaetCp17u7Kb6X9np7Vz17i/4KED +-----END RSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa.ppk b/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa.ppk new file mode 100644 index 0000000..4504f18 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa.ppk @@ -0,0 +1,26 @@ +PuTTY-User-Key-File-2: ssh-rsa +Encryption: none +Comment: rsa-key-20150522 +Public-Lines: 6 +AAAAB3NzaC1yc2EAAAABJQAAAQB1quqP0rhl78NOLD4lj+1x5FGAqZ3aqo6GiEPz +KOaQmy86FuJMK0nHj3gUKTa/Kvaa+8PZyeu+uVseHg47YrynCOcJEEnpqvbArc8M +xMWuUnTUMrjvokGDOBBiQu4UAE4bybpgXkNHJfbrcDVgivmv3Ikn8PVIZ1rLBMLZ +6Lzn0rjPjFD0X4WqsAJW2SFiZnsjMZtVL2TWadNTyyfjjm2NCRBvd32VLohkSe9Q +BZBD6MW8YQyBKUnEF/7WNY0eehDVrfx1YqPOV1bDwFUhRaAYpLDLDR0KCAPvx7qb +8G5Cq0TIBsEr3H8ztNRcOTQoaKgn0T18M7cyS4ykoNLYW4Zx +Private-Lines: 14 +AAABACyF3DZraF3sBLXLjSL4MFSblHXfUHxAiPSiQzlpa/9dUCPRTrUJddzOgHZU +yJtcXU9mLm4VDRe7wZyxbSs6Hd5WZUGzIuLLEUH8k4hKdE/MLDSdkhV7qhX5iaij +tAeRaammRoVUGXTd7rnzGx2cXnnkvkZ22VmqkQ6MLg1DTmWNfOO9cdwFGdQawf/n 
+yUV0nTkWsHXy5Qrozq9wRFk8eyw+pFllxqavsNftZX8VDiQt27JLZPTU4LGkH660 +3gq1KhNS/l05TlXnMZGjlcPN8UEaBzmCWRezhJSttjs5Kgp1K3yDf4ozMR/HWOCj +Jq8fd3VIgli6ML8yjr/c0A0T9MUAAACBAL1/byxHiCvY/2C+/L5T+ZZq13jdZuYK +MmOFaNITgEdNGWSIFYRzhLKGXj7awQWOIW6chj470GNOfQjFL1TvXhbwfqW6esDa +kETOYQPYQHZijABcn7uurMUm/bu5x/z9gYkAfniOCI5vmvMvJ09JcZ0iUmFWDZZY +fAutBvrt+n/vAAAAgQCe9jrA51wn1/wzKmWF+2+OWFUG9usheIcEbHB8mxLguLfU ++x4i+2vLo0FtXEPAw+Bt7Tge4t0m6USiVZXtW/QKsh0kMj4mNVHFz+XXw4l1QOYv +n5TjnLepiP7majXv4GHI2eOcHkyly4sIkj4jNLYqvT86hMxW4IC+jtJEWhn/nwAA +AIEAlJ8cExu2WrWukTDJQHrVegtvdJUhNjol2wLucPuWwSxKuB8FHYwaPRYRkf3d +DkZ53hhjJZ0BVkAaQ28uqM09xKD+q1H4/r0nnbtlV4uHLl3cCD5mGrH8I/iDPJX4 +fFIqCa0+n1D6RzvDqs1QIu+PGSp0K6vHOOS5fP0ZpuT025E= +Private-MAC: 4ca26008c85b901f4d2766b0924c25e527678d7e diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa_enc b/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa_enc new file mode 100644 index 0000000..75a1e95 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/id_rsa_enc @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,CCE70744FB28F2EFB1D74377281A780C + +1WiGnqpSGXFIg+WYr7T2XN72C1YrNQ1jmRISb32TB/Rh2Zo47fjyQnv9impz8b6m +91R/qF7uCLI0fswvT5oqwn1L0vUAA0YtW/E2IQJEx5GPiaexoDJYYfu2yy036Kca +e9VtCajgrV+kycg1CknCxQKMcKXNq8Czvq66PM4Bzknek5hhdmxHxOl0QAE+8EXt +pnasOGz3szTUKkD6givwWgvDXY3BnVG46fXff99Xqgb6fx5IDbAkVKaxWIN/c81E +b0rcfyoLb7yjPgNYn9vUI6Z+24NMYUYARzb3dG5geaeX0BYb/VlCtJUsP0Rp2P2P +jl+cdvBKaeOvA9gPo/jAtSOFexQRs7AzKzoOLYU1fokd8HhqxOKAljn9ujmEqif7 +qcimk2s7ff6tSSlxtRzDP+Uq9d1u5tyaONRV2lwj+GdP1gRoOmdZL5chdvoAi0I8 +5eMf58hEuN2d4h4FryO6z7K+XQ9oo6/N/xHU0U/t2Pco9oY2L6oWMDxKwbfPhaD5 +CcoEElsK4XFArYDielEq9Y1sXaEuwR5I0ksDDsANp74r9Bhcqz60gJa6hVz0ouEU +QA67wV9+TRmulKRxwANvqxQwqPuxqcTPeJjXSUN/ZCaDwYmI+d1poxMx2fQzT82M +onlgOWq+3HbCotyoeFpCameymwDQzmrYdMBr7oWLgnOrxmJ89zDc6+jkHFgQJvnU +atyeVDqe866ZvvIGWS+r/EsDjV3cTW/cJvdsC+5BpnoXoVF4LqxE3LFbEbQBvqio 
+4enCZpspQSMOJra37vSofbD+DyI5Wd+y8SBmfDLjyDFhT0spW9aN99uFqSc3UElA +SAmnFmpYBFEQrRGpvpu5sC0c/YjZeRXr0/F1xPpIT1SWzpRsbcsWRBDzWjLOKWQx +8ytwc2QS7eKedfqkPWpYKW0Qtps+XgnGWA6PBX42IYhLsKANRfhFXQv5LPqLNNOn +3EsG9pd+0dBpfxFQfyyAKAUuvpJNgJ6kNx8VSj8Ppj8lyUdGa9YucgB02m7gHC9U +A4YyJsIcjo6IcrjM+ez1govRRS0nE8AUb8ups9tn8mdBwqcPCrgcJhV7JkOYNJYh +NAh0vgmneOq8LSVs2SRaL3uuLNbjh1LR9iViwbIY8kMQXkiXa2/V+PFwt5oqeX5f +2x3yzCeGBiQW10InyBBnKutbPD85R4YJhQ55bOMDSFfGGqwOU1QURiO1NUzf9n/2 ++E8VE7J/IQoO0TrJpC+EV0ROKME9W6+AvEFdmdIigbq3bkdEgSixyLnrhV8V8T4N +nbKlLoqfXt8DmT+h8XPzgsu0Fq/PNi6xBaiUsaN9tK6OP2ZVjr9ihbeLTI0rcKDr +XX2cWPvTcboRLt+S4wmqchMf7Kxa2PfX5Tf+KCcdZNQO4YqS23wQZgk61kuOQCsS +uOop+ICI7yWZkjqCOzGOeHLl/7FyFeprsFDIwD1g20y9bzibbJlbQPhwXSalqDQT +MWLH3rdFuvgLH7ujtjxSakES+VzkOhbnmb/Wypbl1D7P7GT2seau16EEGQDhDzcJ +Q4d/BjR2WqqxmC79MOAvUWAu6fZQjPD30/gYPGpMaEuiLrDlzDqvf+oi4A9+EtRL +-----END RSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_dsa_key b/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_dsa_key new file mode 100644 index 0000000..5448947 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_dsa_key @@ -0,0 +1,12 @@ +-----BEGIN DSA PRIVATE KEY----- +MIIBuwIBAAKBgQDEK+daQ7RuajwxkmBmogb0iUSi/w2RYKuvC2EiviBu3S2s9Bfq +gROKscAnURrxpTOa+iYeI7hRzfuX0qFmnFwXIjKJBjqBdg9r76UR5UNytnWQkJ5x +lxsZThMeAMw38SvmRMw15kkgxycKGqu4yvNLGyVwN02bPVjLcEVLWLCM1wIVAK50 +5JqF0nmGXFkcmNtxR24/mNXTAoGBAIc2p8C8b08OTQPmfZI+Wq8a+CuEr5R36bMW +TAs5etqmO2aVo5zvR0MnTjoS2ZDbuznDG9RiSuIB+ivr/daEwi+K+Ha8pZfYjXCG +ldzvmr5I4x8rkH3zyn7BADnc+/q3pa8AnZvTme5eNsxn1Pu/rmC/8KKnhmzRggqP +N8ORhoQQAoGAMCvoMcsDAui2d/WVpgHZZEFlxfbf4dPUPYb5zf2xOiMG9OK+Cbv3 +NaLZwk/Hd2g4L3nwTKDASxfmRcrbuaOg/d7aDjQ2mJz18Js4IjY34QpgLspGCNX/ +6rJSQ+ov1Z2Etr95N4Tzm3qpxW5BH9TTgaC/ntb9NRqIzNPCvAHXmlcCFBxgZpyb +4GUgmqhTOMtmBkJ7QpL9 +-----END DSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_ecdsa_key 
b/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_ecdsa_key new file mode 100644 index 0000000..0476442 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_ecdsa_key @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEICrdbIIYmW/XTK9hxaQZZ56IGwG0NhqD2eppYUJNZsECoAoGCCqGSM49 +AwEHoUQDQgAEa+MuLv++3ft5HPFIsM2hQnmHPF12q08/MaHoGud4yqp3evyomjZN +xbsSb39fv8t6XX1u1rm5oHQcBV5Mqomaeg== +-----END EC PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_rsa_key b/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_rsa_key new file mode 100644 index 0000000..9c2cc6f --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/fixtures/ssh_host_rsa_key @@ -0,0 +1,15 @@ +-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQC57UB/5H0M+t+mopksrltCCIXghryzofJjau+8tuMT9CG6ta3S +O9aKApJUUG/xtc88giVhB7HFABX/oob+jrkSthR8s/whULC8E+GhvOBjHydRUZIs +aPYOMBb42HcbOsgq3li/hwOcDk0vY00hZDKCum9BgvRAb7dPEkw2dmiCQQIDAQAB +AoGAMG+HOwoaLbR5aR64yrQNYBF6Vvii1iUdURr9o2r9kygpVUuZIcim5kMvPbnK +v+w+NaQt+q4XeJvCH1uG0W/69FwnphfaOVmCCUtsoJ6sU3fWr9x59MtKL2Llh8xR +50lz6R+eDXoYRDq245hG9BFn/bu0vtqQqx06mlZJcjaRocECQQDjdYFmr+DSww3x +VNx0G0DUkaQZZ+iqZiT3Zund2pcBB4aLiewOrqj0GFct4+YNzgxIXPejmS0eSokN +N2lC3NxZAkEA0UGjN5TG5/LEK3zcYtx2kpXryenrYORo1n2L/WPMZ0mjLQyd4LJr +ibfgVUfwX/kV3vgGYLwjpgcaTiMsecv4KQJAYMmMgZSPdz+WvD1e/WznXkyG5mSn +xXJngnrhQw0TulVodBIBR5IcxJli510VdIRcB6K/oXa5ky0mOmB8wv3WKQJBAKEF +PxE//KbzWhyUogm4180IbD4dMDCI0ltqlFRRfTJlqZi6wqnq4XFB+u/kwYU4aKoA +dPfvDgduI8HIsyqt17ECQDI/HC8PiYsDIOyVpQuQdIAsbGmoavK7X1MVEWR2nj9t +7BbUVFSnVKynL4TWIJZ6xP8WQwkDBQc5WjognHDaUTQ= +-----END RSA PRIVATE KEY----- diff --git a/reverse_engineering/node_modules/ssh2/test/test-client-server.js b/reverse_engineering/node_modules/ssh2/test/test-client-server.js new file mode 100644 index 0000000..f8d3854 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/test-client-server.js @@ -0,0 +1,1902 @@ +var Client = require('../lib/client'); +var 
Server = require('../lib/server'); +var OPEN_MODE = require('ssh2-streams').SFTPStream.OPEN_MODE; +var STATUS_CODE = require('ssh2-streams').SFTPStream.STATUS_CODE; +var utils = require('ssh2-streams').utils; + +var semver = require('semver'); + +var net = require('net'); +var fs = require('fs'); +var crypto = require('crypto'); +var path = require('path'); +var join = path.join; +var inspect = require('util').inspect; +var assert = require('assert'); + +var t = -1; +var group = path.basename(__filename, '.js') + '/'; +var fixturesdir = join(__dirname, 'fixtures'); + +var USER = 'nodejs'; +var PASSWORD = 'FLUXCAPACITORISTHEPOWER'; +var MD5_HOST_FINGERPRINT = '64254520742d3d0792e918f3ce945a64'; +var KEY_RSA_BAD = fs.readFileSync(join(fixturesdir, 'bad_rsa_private_key')); +var HOST_KEY_RSA = fs.readFileSync(join(fixturesdir, 'ssh_host_rsa_key')); +var HOST_KEY_DSA = fs.readFileSync(join(fixturesdir, 'ssh_host_dsa_key')); +var HOST_KEY_ECDSA = fs.readFileSync(join(fixturesdir, 'ssh_host_ecdsa_key')); +var CLIENT_KEY_ENC_RSA = fs.readFileSync(join(fixturesdir, 'id_rsa_enc')); +var CLIENT_KEY_ENC_RSA_PUB = utils.parseKey(CLIENT_KEY_ENC_RSA); +utils.decryptKey(CLIENT_KEY_ENC_RSA_PUB, 'foobarbaz'); +CLIENT_KEY_ENC_RSA_PUB = utils.genPublicKey(CLIENT_KEY_ENC_RSA_PUB); +var CLIENT_KEY_PPK_RSA = fs.readFileSync(join(fixturesdir, 'id_rsa.ppk')); +var CLIENT_KEY_PPK_RSA_PUB = utils.parseKey(CLIENT_KEY_PPK_RSA); +var CLIENT_KEY_RSA = fs.readFileSync(join(fixturesdir, 'id_rsa')); +var CLIENT_KEY_RSA_PUB = utils.genPublicKey(utils.parseKey(CLIENT_KEY_RSA)); +var CLIENT_KEY_DSA = fs.readFileSync(join(fixturesdir, 'id_dsa')); +var CLIENT_KEY_DSA_PUB = utils.genPublicKey(utils.parseKey(CLIENT_KEY_DSA)); +if (semver.gte(process.version, '5.2.0')) { + var CLIENT_KEY_ECDSA = fs.readFileSync(join(fixturesdir, 'id_ecdsa')); + var CLIENT_KEY_ECDSA_PUB = utils.genPublicKey( + utils.parseKey(CLIENT_KEY_ECDSA) + ); +} +var DEBUG = false; + +var tests = [ + { run: function() { + var client; + 
var server; + var r; + + r = setup( + this, + { username: USER, + privateKey: CLIENT_KEY_RSA + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'publickey', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ssh-rsa', + makeMsg('Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_RSA_PUB.public, + ctx.key.data, + makeMsg('Public key mismatch')); + if (ctx.signature) { + var verifier = crypto.createVerify('RSA-SHA1'); + var pem = CLIENT_KEY_RSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg('Could not verify PK signature')); + ctx.accept(); + } else + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Authenticate with an RSA key' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + privateKey: CLIENT_KEY_ENC_RSA, + passphrase: 'foobarbaz', + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'publickey', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ssh-rsa', + makeMsg('Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_ENC_RSA_PUB.public, + ctx.key.data, + makeMsg('Public key mismatch')); + if (ctx.signature) { + var verifier = crypto.createVerify('RSA-SHA1'); + var pem = CLIENT_KEY_ENC_RSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg('Could not verify PK signature')); + ctx.accept(); + } else + ctx.accept(); + 
}).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Authenticate with an encrypted RSA key' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + privateKey: CLIENT_KEY_PPK_RSA + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'publickey', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ssh-rsa', + makeMsg('Unexpected key algo: ' + ctx.key.algo)); + if (ctx.signature) { + var verifier = crypto.createVerify('RSA-SHA1'); + var pem = CLIENT_KEY_PPK_RSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg('Could not verify PK signature')); + ctx.accept(); + } else + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Authenticate with an RSA key (PPK)' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + privateKey: CLIENT_KEY_DSA + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'publickey', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ssh-dss', + makeMsg('Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_DSA_PUB.public, + ctx.key.data, + makeMsg('Public key mismatch')); + if (ctx.signature) { + var verifier = crypto.createVerify('DSA-SHA1'); + var pem = CLIENT_KEY_DSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg('Could not verify PK signature')); + ctx.accept(); + } else 
+ ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Authenticate with a DSA key' + }, + { run: function() { + if (semver.lt(process.version, '5.2.0')) + return next(); + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + privateKey: CLIENT_KEY_ECDSA + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'publickey', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ecdsa-sha2-nistp256', + makeMsg('Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_ECDSA_PUB.public, + ctx.key.data, + makeMsg('Public key mismatch')); + if (ctx.signature) { + var verifier = crypto.createVerify('sha256'); + var pem = CLIENT_KEY_ECDSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg('Could not verify PK signature')); + ctx.accept(); + } else + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Authenticate with a ECDSA key' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: 'asdf', + algorithms: { + serverHostKey: ['ssh-dss'] + } + }, + { hostKeys: [HOST_KEY_DSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'password', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.password === 'asdf', + makeMsg('Unexpected password: ' + ctx.password)); + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Server with DSA host key' + }, + { run: function() { + if 
(semver.lt(process.version, '5.2.0')) + return next(); + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: 'asdf' + }, + { hostKeys: [HOST_KEY_ECDSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'password', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.password === 'asdf', + makeMsg('Unexpected password: ' + ctx.password)); + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Server with ECDSA host key' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: 'asdf', + algorithms: { + serverHostKey: 'ssh-rsa' + } + }, + { hostKeys: [HOST_KEY_RSA, HOST_KEY_DSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'password', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.password === 'asdf', + makeMsg('Unexpected password: ' + ctx.password)); + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Server with multiple host keys (RSA selected)' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: 'asdf', + algorithms: { + serverHostKey: 'ssh-dss' + } + }, + { hostKeys: [HOST_KEY_RSA, HOST_KEY_DSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'password', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + 
assert(ctx.password === 'asdf', + makeMsg('Unexpected password: ' + ctx.password)); + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Server with multiple host keys (DSA selected)' + }, + { run: function() { + var client; + var server; + var r; + var hostname = 'foo'; + var username = 'bar'; + + r = setup( + this, + { username: USER, + privateKey: CLIENT_KEY_RSA, + localHostname: hostname, + localUsername: username + }, + { hostKeys: [ HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + if (ctx.method === 'hostbased') { + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ssh-rsa', + makeMsg('Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_RSA_PUB.public, + ctx.key.data, + makeMsg('Public key mismatch')); + assert(ctx.signature, + makeMsg('Expected signature')); + assert(ctx.localHostname === hostname, + makeMsg('Wrong local hostname')); + assert(ctx.localUsername === username, + makeMsg('Wrong local username')); + var verifier = crypto.createVerify('RSA-SHA1'); + var pem = CLIENT_KEY_RSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg('Could not verify hostbased signature')); + ctx.accept(); + } else + ctx.reject(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Authenticate with hostbased' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'password', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + 
assert(ctx.password === PASSWORD, + makeMsg('Unexpected password: ' + ctx.password)); + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Authenticate with a password' + }, + { run: function() { + var client; + var server; + var r; + var verified = false; + + r = setup( + this, + { username: USER, + password: PASSWORD, + hostHash: 'md5', + hostVerifier: function(hash) { + assert(hash === MD5_HOST_FINGERPRINT, + makeMsg('Host fingerprint mismatch')); + return (verified = true); + } + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }).on('close', function() { + assert(verified, makeMsg('Failed to verify host fingerprint')); + }); + }, + what: 'Verify host fingerprint' + }, + { run: function() { + var client; + var server; + var r; + var out = ''; + var outErr = ''; + var exitArgs; + var closeArgs; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.once('session', function(accept, reject) { + var session = accept(); + session.once('exec', function(accept, reject, info) { + assert(info.command === 'foo --bar', + makeMsg('Wrong exec command: ' + info.command)); + var stream = accept(); + stream.stderr.write('stderr data!\n'); + stream.write('stdout data!\n'); + stream.exit(100); + stream.end(); + conn.end(); + }); + }); + }); + }); + client.on('ready', function() { + client.exec('foo --bar', function(err, stream) { + assert(!err, makeMsg('Unexpected exec error: ' + err)); + stream.on('data', function(d) { + out += d; + }).on('exit', function(code) { + exitArgs = new Array(arguments.length); + for (var i = 0; i < 
exitArgs.length; ++i) + exitArgs[i] = arguments[i]; + }).on('close', function(code) { + closeArgs = new Array(arguments.length); + for (var i = 0; i < closeArgs.length; ++i) + closeArgs[i] = arguments[i]; + }).stderr.on('data', function(d) { + outErr += d; + }); + }); + }).on('end', function() { + assert.deepEqual(exitArgs, + [100], + makeMsg('Wrong exit args: ' + inspect(exitArgs))); + assert.deepEqual(closeArgs, + [100], + makeMsg('Wrong close args: ' + inspect(closeArgs))); + assert(out === 'stdout data!\n', + makeMsg('Wrong stdout data: ' + inspect(out))); + assert(outErr === 'stderr data!\n', + makeMsg('Wrong stderr data: ' + inspect(outErr))); + }); + }, + what: 'Simple exec' + }, + { run: function() { + var client; + var server; + var r; + var out = ''; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.once('session', function(accept, reject) { + var session = accept(), + env = {}; + session.once('env', function(accept, reject, info) { + env[info.key] = info.val; + accept && accept(); + }).once('exec', function(accept, reject, info) { + assert(info.command === 'foo --bar', + makeMsg('Wrong exec command: ' + info.command)); + var stream = accept(); + stream.write(''+env.SSH2NODETEST); + stream.exit(100); + stream.end(); + conn.end(); + }); + }); + }); + }); + client.on('ready', function() { + client.exec('foo --bar', + { env: { SSH2NODETEST: 'foo' } }, + function(err, stream) { + assert(!err, makeMsg('Unexpected exec error: ' + err)); + stream.on('data', function(d) { + out += d; + }); + }); + }).on('end', function() { + assert(out === 'foo', + makeMsg('Wrong stdout data: ' + inspect(out))); + }); + }, + what: 'Exec with environment set' + }, + { run: function() { + var client; + var server; + var r; + var out = ''; + + r = 
setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.once('session', function(accept, reject) { + var session = accept(); + var ptyInfo; + session.once('pty', function(accept, reject, info) { + ptyInfo = info; + accept && accept(); + }).once('exec', function(accept, reject, info) { + assert(info.command === 'foo --bar', + makeMsg('Wrong exec command: ' + info.command)); + var stream = accept(); + stream.write(JSON.stringify(ptyInfo)); + stream.exit(100); + stream.end(); + conn.end(); + }); + }); + }); + }); + var pty = { + rows: 2, + cols: 4, + width: 0, + height: 0, + term: 'vt220', + modes: {} + }; + client.on('ready', function() { + client.exec('foo --bar', + { pty: pty }, + function(err, stream) { + assert(!err, makeMsg('Unexpected exec error: ' + err)); + stream.on('data', function(d) { + out += d; + }); + }); + }).on('end', function() { + assert.deepEqual(JSON.parse(out), + pty, + makeMsg('Wrong stdout data: ' + inspect(out))); + }); + }, + what: 'Exec with pty set' + }, + { run: function() { + var client; + var server; + var r; + var out = ''; + + r = setup( + this, + { username: USER, + password: PASSWORD, + agent: '/foo/bar/baz' + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.once('session', function(accept, reject) { + var session = accept(); + var authAgentReq = false; + session.once('auth-agent', function(accept, reject) { + authAgentReq = true; + accept && accept(); + }).once('exec', function(accept, reject, info) { + assert(info.command === 'foo --bar', + makeMsg('Wrong exec command: ' + info.command)); + var stream = accept(); + 
stream.write(inspect(authAgentReq)); + stream.exit(100); + stream.end(); + conn.end(); + }); + }); + }); + }); + client.on('ready', function() { + client.exec('foo --bar', + { agentForward: true }, + function(err, stream) { + assert(!err, makeMsg('Unexpected exec error: ' + err)); + stream.on('data', function(d) { + out += d; + }); + }); + }).on('end', function() { + assert(out === 'true', + makeMsg('Wrong stdout data: ' + inspect(out))); + }); + }, + what: 'Exec with OpenSSH agent forwarding' + }, + { run: function() { + var client; + var server; + var r; + var out = ''; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.once('session', function(accept, reject) { + var session = accept(); + var x11 = false; + session.once('x11', function(accept, reject, info) { + x11 = true; + accept && accept(); + }).once('exec', function(accept, reject, info) { + assert(info.command === 'foo --bar', + makeMsg('Wrong exec command: ' + info.command)); + var stream = accept(); + stream.write(inspect(x11)); + stream.exit(100); + stream.end(); + conn.end(); + }); + }); + }); + }); + client.on('ready', function() { + client.exec('foo --bar', + { x11: true }, + function(err, stream) { + assert(!err, makeMsg('Unexpected exec error: ' + err)); + stream.on('data', function(d) { + out += d; + }); + }); + }).on('end', function() { + assert(out === 'true', + makeMsg('Wrong stdout data: ' + inspect(out))); + }); + }, + what: 'Exec with X11 forwarding' + }, + { run: function() { + var client; + var server; + var r; + var out = ''; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) 
{ + ctx.accept(); + }).on('ready', function() { + conn.once('session', function(accept, reject) { + var session = accept(); + var sawPty = false; + session.once('pty', function(accept, reject, info) { + sawPty = true; + accept && accept(); + }).once('shell', function(accept, reject) { + var stream = accept(); + stream.write('Cowabunga dude! ' + inspect(sawPty)); + stream.end(); + conn.end(); + }); + }); + }); + }); + client.on('ready', function() { + client.shell(function(err, stream) { + assert(!err, makeMsg('Unexpected exec error: ' + err)); + stream.on('data', function(d) { + out += d; + }); + }); + }).on('end', function() { + assert(out === 'Cowabunga dude! true', + makeMsg('Wrong stdout data: ' + inspect(out))); + }); + }, + what: 'Simple shell' + }, + { run: function() { + var client; + var server; + var r; + var expHandle = new Buffer([1, 2, 3, 4]); + var sawOpenS = false; + var sawCloseS = false; + var sawOpenC = false; + var sawCloseC = false; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.once('session', function(accept, reject) { + var session = accept(); + session.once('sftp', function(accept, reject) { + if (accept) { + var sftp = accept(); + sftp.once('OPEN', function(id, filename, flags, attrs) { + assert(id === 0, + makeMsg('Unexpected sftp request ID: ' + id)); + assert(filename === 'node.js', + makeMsg('Unexpected filename: ' + filename)); + assert(flags === OPEN_MODE.READ, + makeMsg('Unexpected flags: ' + flags)); + sawOpenS = true; + sftp.handle(id, expHandle); + sftp.once('CLOSE', function(id, handle) { + assert(id === 1, + makeMsg('Unexpected sftp request ID: ' + id)); + assert.deepEqual(handle, + expHandle, + makeMsg('Wrong sftp file handle: ' + + inspect(handle))); + sawCloseS = true; + sftp.status(id, 
STATUS_CODE.OK); + conn.end(); + }); + }); + } + }); + }); + }); + }); + client.on('ready', function() { + client.sftp(function(err, sftp) { + assert(!err, makeMsg('Unexpected sftp error: ' + err)); + sftp.open('node.js', 'r', function(err, handle) { + assert(!err, makeMsg('Unexpected sftp error: ' + err)); + assert.deepEqual(handle, + expHandle, + makeMsg('Wrong sftp file handle: ' + + inspect(handle))); + sawOpenC = true; + sftp.close(handle, function(err) { + assert(!err, makeMsg('Unexpected sftp error: ' + err)); + sawCloseC = true; + }); + }); + }); + }).on('end', function() { + assert(sawOpenS, makeMsg('Expected sftp open()')); + assert(sawOpenC, makeMsg('Expected sftp open() callback')); + assert(sawCloseS, makeMsg('Expected sftp open()')); + assert(sawOpenC, makeMsg('Expected sftp close() callback')); + }); + }, + what: 'Simple SFTP' + }, + { run: function() { + var client; + var server; + var state = { + readies: 0, + closes: 0 + }; + var clientcfg = { + username: USER, + password: PASSWORD + }; + var servercfg = { + hostKeys: [HOST_KEY_RSA] + }; + var reconnect = false; + + client = new Client(), + server = new Server(servercfg); + + function onReady() { + assert(++state.readies <= 4, + makeMsg('Wrong ready count: ' + state.readies)); + } + function onClose() { + assert(++state.closes <= 3, + makeMsg('Wrong close count: ' + state.closes)); + if (state.closes === 2) + server.close(); + else if (state.closes === 3) + next(); + } + + server.listen(0, 'localhost', function() { + clientcfg.host = 'localhost'; + clientcfg.port = server.address().port; + client.connect(clientcfg); + }); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', onReady); + }).on('close', onClose); + client.on('ready', function() { + onReady(); + if (reconnect) + client.end(); + else { + reconnect = true; + client.connect(clientcfg); + } + }).on('close', onClose); + }, + what: 'connect() on connected client' + }, + { 
run: function() { + var client = new Client({ + username: USER, + password: PASSWORD + }); + + assert.throws(function() { + client.exec('uptime', function(err, stream) { + assert(false, makeMsg('Callback unexpectedly called')); + }); + }); + next(); + }, + what: 'Throw when not connected' + }, + { run: function() { + var client; + var server; + var r; + var calledBack = 0; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }); + }); + client.on('ready', function() { + function callback(err, stream) { + assert(err, makeMsg('Expected error')); + assert(err.message === 'No response from server', + makeMsg('Wrong error message: ' + err.message)); + ++calledBack; + } + client.exec('uptime', callback); + client.shell(callback); + client.sftp(callback); + client.end(); + }).on('close', function() { + // give the callbacks a chance to execute + process.nextTick(function() { + assert(calledBack === 3, + makeMsg('Only ' + + calledBack + + '/3 outstanding callbacks called')); + }); + }); + }, + what: 'Outstanding callbacks called on disconnect' + }, + { run: function() { + var client = new Client({ + username: USER, + password: PASSWORD + }); + + assert.throws(function() { + client.exec('uptime', function(err, stream) { + assert(false, makeMsg('Callback unexpectedly called')); + }); + }); + next(); + }, + what: 'Throw when not connected' + }, + { run: function() { + var client; + var server; + var r; + var calledBack = 0; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + 
session.once('exec', function(accept, reject, info) { + var stream = accept(); + stream.exit(0); + stream.end(); + }); + }); + }); + }); + client.on('ready', function() { + function callback(err, stream) { + assert(!err, makeMsg('Unexpected error: ' + err)); + stream.resume(); + if (++calledBack === 3) + client.end(); + } + client.exec('foo', callback); + client.exec('bar', callback); + client.exec('baz', callback); + }).on('end', function() { + assert(calledBack === 3, + makeMsg('Only ' + + calledBack + + '/3 callbacks called')); + }); + }, + what: 'Pipelined requests' + }, + { run: function() { + var client; + var server; + var r; + var calledBack = 0; + + r = setup( + this, + { username: USER, + password: PASSWORD, + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + var reqs = []; + conn.on('session', function(accept, reject) { + if (reqs.length === 0) { + conn.rekey(function(err) { + assert(!err, makeMsg('Unexpected rekey error: ' + err)); + reqs.forEach(function(accept) { + var session = accept(); + session.once('exec', function(accept, reject, info) { + var stream = accept(); + stream.exit(0); + stream.end(); + }); + }); + }); + } + reqs.push(accept); + }); + }); + }); + client.on('ready', function() { + function callback(err, stream) { + assert(!err, makeMsg('Unexpected error: ' + err)); + stream.resume(); + if (++calledBack === 3) + client.end(); + } + client.exec('foo', callback); + client.exec('bar', callback); + client.exec('baz', callback); + }).on('end', function() { + assert(calledBack === 3, + makeMsg('Only ' + + calledBack + + '/3 callbacks called')); + }); + }, + what: 'Pipelined requests with intermediate rekeying' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + 
client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + session.once('exec', function(accept, reject, info) { + var stream = accept(); + stream.exit(0); + stream.end(); + }); + }); + }); + }); + client.on('ready', function() { + client.exec('foo', function(err, stream) { + assert(!err, makeMsg('Unexpected error: ' + err)); + stream.on('exit', function(code, signal) { + client.end(); + }); + }); + }); + }, + what: 'Ignore outgoing after stream close' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + accept().on('sftp', function(accept, reject) { + var sftp = accept(); + // XXX: hack to get channel ... 
+ var channel = sftp._readableState.pipes; + + channel.unpipe(sftp); + sftp.unpipe(channel); + + channel.exit(127); + channel.close(); + }); + }); + }); + }); + client.on('ready', function() { + var timeout = setTimeout(function() { + assert(false, makeMsg('Unexpected SFTP timeout')); + }, 1000); + client.sftp(function(err, sftp) { + clearTimeout(timeout); + assert(err, makeMsg('Expected error')); + assert(err.code === 127, + makeMsg('Expected exit code 127, saw: ' + err.code)); + client.end(); + }); + }); + }, + what: 'SFTP server aborts with exit-status' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD, + sock: new net.Socket() + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() {}); + }); + client.on('ready', function() { + client.end(); + }); + }, + what: 'Double pipe on unconnected, passed in net.Socket' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }); + conn.on('request', function(accept, reject, name, info) { + accept(); + conn.forwardOut('good', 0, 'remote', 12345, function(err, ch) { + if (err) { + assert(!err, makeMsg('Unexpected error: ' + err)); + } + conn.forwardOut('bad', 0, 'remote', 12345, function(err, ch) { + assert(err, makeMsg('Should receive error')); + client.end(); + }); + }); + }); + }); + + client.on('ready', function() { + // request forwarding + client.forwardIn('good', 0, function(err, port) { + if (err) { + assert(!err, makeMsg('Unexpected error: ' + err)); + } + }); + }); + client.on('tcp connection', function(details, accept, reject) { + accept(); + 
}); + }, + what: 'Client auto-rejects unrequested, allows requested forwarded-tcpip' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA], + greeting: 'Hello world!' + } + ); + client = r.client; + server = r.server; + + var sawGreeting = false; + + client.on('greeting', function(greeting) { + assert.strictEqual(greeting, 'Hello world!\r\n'); + sawGreeting = true; + }); + client.on('banner', function(message) { + assert.fail(null, null, makeMsg('Unexpected banner')); + }); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(sawGreeting, makeMsg('Client did not see greeting')); + assert(ctx.method === 'password', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.password === PASSWORD, + makeMsg('Unexpected password: ' + ctx.password)); + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Server greeting' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA], + banner: 'Hello world!' 
+ } + ); + client = r.client; + server = r.server; + + var sawBanner = false; + + client.on('greeting', function(greeting) { + assert.fail(null, null, makeMsg('Unexpected greeting')); + }); + client.on('banner', function(message) { + assert.strictEqual(message, 'Hello world!\r\n'); + sawBanner = true; + }); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(sawBanner, makeMsg('Client did not see banner')); + assert(ctx.method === 'password', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg('Unexpected username: ' + ctx.username)); + assert(ctx.password === PASSWORD, + makeMsg('Unexpected password: ' + ctx.password)); + ctx.accept(); + }).on('ready', function() { + conn.end(); + }); + }); + }, + what: 'Server banner' + }, + { run: function() { + var client; + var server; + var r; + var fastRejectSent = false; + + function sendAcceptLater(accept) { + if (fastRejectSent) + accept(); + else + setImmediate(sendAcceptLater, accept); + } + + r = setup( + this, + { username: USER }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }); + + conn.on('request', function(accept, reject, name, info) { + if (info.bindAddr === 'fastReject') { + // Will call reject on 'fastReject' soon + reject(); + fastRejectSent = true; + } else + // but accept on 'slowAccept' later + sendAcceptLater(accept); + }); + }); + + client.on('ready', function() { + var replyCnt = 0; + + client.forwardIn('slowAccept', 0, function(err) { + assert(!err, makeMsg('Unexpected error: ' + err)); + if (++replyCnt === 2) + client.end(); + }); + + client.forwardIn('fastReject', 0, function(err) { + assert(err, makeMsg('Should receive error')); + if (++replyCnt === 2) + client.end(); + }); + }); + }, + what: 'Server responds to global requests in the right order' + }, + { run: function() { 
+ var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + var timer; + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + session.once('subsystem', function(accept, reject, info) { + assert.equal(info.name, 'netconf'); + + // Prevent success reply from being sent + conn._sshstream.channelSuccess = function() {}; + + var stream = accept(); + stream.close(); + timer = setTimeout(function() { + throw new Error(makeMsg('Expected client callback')); + }, 50); + }); + }); + }); + }); + client.on('ready', function() { + client.subsys('netconf', function(err, stream) { + clearTimeout(timer); + assert(err); + client.end(); + }); + }); + }, + what: 'Cleanup outstanding channel requests on channel close' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER, + password: PASSWORD + }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + var timer; + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + session.once('exec', function(accept, reject, info) { + var stream = accept(); + // Write enough to bring the Client's channel window to 0 + // (currently 1MB) + var buf = new Buffer(2048); + for (var i = 0; i < 1000; ++i) + stream.write(buf); + stream.exit(0); + stream.close(); + }); + }); + }); + }); + client.on('ready', function() { + client.exec('foo', function(err, stream) { + var sawClose = false; + assert(!err, makeMsg('Unexpected error')); + client._sshstream.on('CHANNEL_CLOSE:' + stream.incoming.id, onClose); + function onClose() { + // This handler gets 
called *after* the internal handler, so we + // should have seen `stream`'s `close` event already if the bug + // exists + assert(!sawClose, makeMsg('Premature close event')); + client.end(); + } + stream.on('close', function() { + sawClose = true; + }); + }); + }); + }, + what: 'Channel emits close prematurely' + }, + { run: function() { + var client; + var server; + var r; + + r = setup( + this, + { username: USER }, + { hostKeys: [HOST_KEY_RSA], ident: 'OpenSSH_5.3' } + ); + client = r.client; + server = r.server; + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }); + conn.once('request', function(accept, reject, name, info) { + assert(name === 'tcpip-forward', + makeMsg('Unexpected request: ' + name)); + accept(1337); + conn.forwardOut('good', 0, 'remote', 12345, function(err, ch) { + assert(!err, makeMsg('Unexpected error: ' + err)); + client.end(); + }); + }); + }); + + client.on('ready', function() { + // request forwarding + client.forwardIn('good', 0, function(err, port) { + assert(!err, makeMsg('Unexpected error: ' + err)); + assert(port === 1337, makeMsg('Bad bound port: ' + port)); + }); + }); + client.on('tcp connection', function(details, accept, reject) { + assert(details.destIP === 'good', + makeMsg('Bad incoming destIP: ' + details.destIP)); + assert(details.destPort === 1337, + makeMsg('Bad incoming destPort: ' + details.destPort)); + assert(details.srcIP === 'remote', + makeMsg('Bad incoming srcIP: ' + details.srcIP)); + assert(details.srcPort === 12345, + makeMsg('Bad incoming srcPort: ' + details.srcPort)); + accept(); + }); + }, + what: 'OpenSSH 5.x workaround for binding on port 0' + }, + { run: function() { + var client; + var server; + var r; + var srvError; + var cliError; + + r = setup( + this, + { username: USER, + algorithms: { + cipher: [ 'aes128-cbc' ] + } + }, + { hostKeys: [HOST_KEY_RSA], + algorithms: { + cipher: [ 'aes128-ctr' ] + } + } + ); + client = r.client; + server = 
r.server; + + // Remove default client error handler added by `setup()` since we are + // expecting an error in this case + client.removeAllListeners('error'); + + function onError(err) { + if (this === client) { + assert(!cliError, makeMsg('Unexpected multiple client errors')); + cliError = err; + } else { + assert(!srvError, makeMsg('Unexpected multiple server errors')); + srvError = err; + } + assert(/handshake failed/i.test(err.message), + makeMsg('Wrong error message')); + } + + server.on('connection', function(conn) { + // Remove default server connection error handler added by `setup()` + // since we are expecting an error in this case + conn.removeAllListeners('error'); + + function onGoodHandshake() { + assert(false, makeMsg('Handshake should have failed')); + } + conn.on('authentication', onGoodHandshake); + conn.on('ready', onGoodHandshake); + + conn.on('error', onError); + }); + + client.on('ready', function() { + assert(false, makeMsg('Handshake should have failed')); + }); + client.on('error', onError); + client.on('close', function() { + assert(cliError, makeMsg('Expected client error')); + assert(srvError, makeMsg('Expected client error')); + }); + }, + what: 'Handshake errors are emitted' + }, + { run: function() { + var client; + var server; + var r; + var cliError; + + r = setup( + this, + { username: USER, privateKey: KEY_RSA_BAD }, + { hostKeys: [HOST_KEY_RSA] } + ); + client = r.client; + server = r.server; + + // Remove default client error handler added by `setup()` since we are + // expecting an error in this case + client.removeAllListeners('error'); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + assert(ctx.method === 'publickey' || ctx.method === 'none', + makeMsg('Unexpected auth method: ' + ctx.method)); + assert(!ctx.signature, makeMsg('Unexpected signature')); + if (ctx.method === 'none') + return ctx.reject(); + ctx.accept(); + }); + conn.on('ready', function() { + assert(false, 
makeMsg('Authentication should have failed')); + }); + }); + + client.on('ready', function() { + assert(false, makeMsg('Authentication should have failed')); + }); + client.on('error', function(err) { + if (cliError) { + assert(/all configured/i.test(err.message), + makeMsg('Wrong error message')); + } else { + cliError = err; + assert(/signing/i.test(err.message), makeMsg('Wrong error message')); + } + }); + client.on('close', function() { + assert(cliError, makeMsg('Expected client error')); + }); + }, + what: 'Client signing errors are caught and emitted' + }, + { run: function() { + var client; + var server; + var r; + var srvError; + var cliError; + + r = setup( + this, + { username: USER, password: 'foo' }, + { hostKeys: [KEY_RSA_BAD] } + ); + client = r.client; + server = r.server; + + // Remove default client error handler added by `setup()` since we are + // expecting an error in this case + client.removeAllListeners('error'); + + server.on('connection', function(conn) { + // Remove default server connection error handler added by `setup()` + // since we are expecting an error in this case + conn.removeAllListeners('error'); + + conn.once('error', function(err) { + assert(/signing/i.test(err.message), makeMsg('Wrong error message')); + srvError = err; + }); + conn.on('authentication', function(ctx) { + assert(false, makeMsg('Handshake should have failed')); + }); + conn.on('ready', function() { + assert(false, makeMsg('Authentication should have failed')); + }); + }); + + client.on('ready', function() { + assert(false, makeMsg('Handshake should have failed')); + }); + client.on('error', function(err) { + assert(!cliError, makeMsg('Unexpected multiple client errors')); + assert(/KEY_EXCHANGE_FAILED/.test(err.message), + makeMsg('Wrong error message')); + cliError = err; + }); + client.on('close', function() { + assert(srvError, makeMsg('Expected server error')); + assert(cliError, makeMsg('Expected client error')); + }); + }, + what: 'Server signing errors 
are caught and emitted' + }, +]; + +function setup(self, clientcfg, servercfg) { + self.state = { + clientReady: false, + serverReady: false, + clientClose: false, + serverClose: false + }; + + if (DEBUG) { + console.log('========================================================\n' + + '[TEST] ' + + self.what + + '\n========================================================'); + clientcfg.debug = function(str) { + console.log('[CLIENT] ' + str); + }; + servercfg.debug = function(str) { + console.log('[SERVER] ' + str); + }; + } + + var client = new Client(); + var server = new Server(servercfg); + + server.on('error', onError) + .on('connection', function(conn) { + conn.on('error', onError) + .on('ready', onReady); + server.close(); + }) + .on('close', onClose); + client.on('error', onError) + .on('ready', onReady) + .on('close', onClose); + + function onError(err) { + var which = (this === client ? 'client' : 'server'); + assert(false, makeMsg('Unexpected ' + which + ' error: ' + err)); + } + function onReady() { + if (this === client) { + assert(!self.state.clientReady, + makeMsg('Received multiple ready events for client')); + self.state.clientReady = true; + } else { + assert(!self.state.serverReady, + makeMsg('Received multiple ready events for server')); + self.state.serverReady = true; + } + if (self.state.clientReady && self.state.serverReady) + self.onReady && self.onReady(); + } + function onClose() { + if (this === client) { + assert(!self.state.clientClose, + makeMsg('Received multiple close events for client')); + self.state.clientClose = true; + } else { + assert(!self.state.serverClose, + makeMsg('Received multiple close events for server')); + self.state.serverClose = true; + } + if (self.state.clientClose && self.state.serverClose) + next(); + } + + process.nextTick(function() { + server.listen(0, 'localhost', function() { + if (clientcfg.sock) + clientcfg.sock.connect(server.address().port, 'localhost'); + else { + clientcfg.host = 'localhost'; + 
clientcfg.port = server.address().port; + } + client.connect(clientcfg); + }); + }); + return { client: client, server: server }; +} + +function next() { + if (Array.isArray(process._events.exit)) + process._events.exit = process._events.exit[1]; + if (++t === tests.length) + return; + + var v = tests[t]; + v.run.call(v); +} + +function makeMsg(what, msg) { + if (msg === undefined) + msg = what; + if (tests[t]) + what = tests[t].what; + else + what = ''; + return '[' + group + what + ']: ' + msg; +} + +process.once('uncaughtException', function(err) { + if (t > -1 && !/(?:^|\n)AssertionError: /i.test(''+err)) + console.log(makeMsg('Unexpected Exception:')); + throw err; +}); +process.once('exit', function() { + assert(t === tests.length, + makeMsg('_exit', + 'Only finished ' + t + '/' + tests.length + ' tests')); +}); + +next(); diff --git a/reverse_engineering/node_modules/ssh2/test/test-openssh.js b/reverse_engineering/node_modules/ssh2/test/test-openssh.js new file mode 100644 index 0000000..ae2c4b0 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/test-openssh.js @@ -0,0 +1,459 @@ +var Server = require('../lib/server'); +var utils = require('ssh2-streams').utils; + +var semver = require('semver'); + +var fs = require('fs'); +var crypto = require('crypto'); +var path = require('path'); +var join = path.join; +var assert = require('assert'); +var spawn = require('child_process').spawn; +var exec = require('child_process').exec; + +var t = -1; +var group = path.basename(__filename, '.js') + '/'; +var fixturesdir = join(__dirname, 'fixtures'); + +var CLIENT_TIMEOUT = 5000; +var USER = 'nodejs'; +var HOST_KEY_RSA = fs.readFileSync(join(fixturesdir, 'ssh_host_rsa_key')); +var HOST_KEY_DSA = fs.readFileSync(join(fixturesdir, 'ssh_host_dsa_key')); +var HOST_KEY_ECDSA = fs.readFileSync(join(fixturesdir, 'ssh_host_ecdsa_key')); +var CLIENT_KEY_RSA_PATH = join(fixturesdir, 'id_rsa'); +var CLIENT_KEY_RSA = fs.readFileSync(CLIENT_KEY_RSA_PATH); +var 
CLIENT_KEY_RSA_PUB = utils.genPublicKey(utils.parseKey(CLIENT_KEY_RSA)); +var CLIENT_KEY_DSA_PATH = join(fixturesdir, 'id_dsa'); +var CLIENT_KEY_DSA = fs.readFileSync(CLIENT_KEY_DSA_PATH); +var CLIENT_KEY_DSA_PUB = utils.genPublicKey(utils.parseKey(CLIENT_KEY_DSA)); +if (semver.gte(process.version, '5.2.0')) { + var CLIENT_KEY_ECDSA_PATH = join(fixturesdir, 'id_ecdsa'); + var CLIENT_KEY_ECDSA = fs.readFileSync(CLIENT_KEY_ECDSA_PATH); + var CLIENT_KEY_ECDSA_PUB = utils.genPublicKey( + utils.parseKey(CLIENT_KEY_ECDSA) + ); +} +var opensshVer; +var DEBUG = false; + +// Fix file modes to avoid OpenSSH client complaints about keys' permissions +fs.readdirSync(fixturesdir).forEach(function(file) { + fs.chmodSync(join(fixturesdir, file), '0600'); +}); + +var tests = [ + { run: function() { + var what = this.what; + var server; + + server = setup( + this, + { privateKeyPath: CLIENT_KEY_RSA_PATH }, + { hostKeys: [HOST_KEY_RSA] } + ); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + if (ctx.method === 'none') + return ctx.reject(); + assert(ctx.method === 'publickey', + makeMsg(what, 'Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg(what, 'Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ssh-rsa', + makeMsg(what, 'Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_RSA_PUB.public, + ctx.key.data, + makeMsg(what, 'Public key mismatch')); + if (ctx.signature) { + var verifier = crypto.createVerify('RSA-SHA1'); + var pem = CLIENT_KEY_RSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg(what, 'Could not verify PK signature')); + ctx.accept(); + } else + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + if (session) { + session.on('exec', function(accept, reject) { + var stream = accept(); + if (stream) { + stream.exit(0); + stream.end(); + } + 
}).on('pty', function(accept, reject) { + accept && accept(); + }); + } + }); + }); + }); + }, + what: 'Authenticate with an RSA key' + }, + { run: function() { + var what = this.what; + var server; + + server = setup( + this, + { privateKeyPath: CLIENT_KEY_DSA_PATH }, + { hostKeys: [HOST_KEY_RSA] } + ); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + if (ctx.method === 'none') + return ctx.reject(); + assert(ctx.method === 'publickey', + makeMsg(what, 'Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, + makeMsg(what, 'Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ssh-dss', + makeMsg(what, 'Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_DSA_PUB.public, + ctx.key.data, + makeMsg(what, 'Public key mismatch')); + if (ctx.signature) { + var verifier = crypto.createVerify('DSA-SHA1'); + var pem = CLIENT_KEY_DSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg(what, 'Could not verify PK signature')); + ctx.accept(); + } else + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + if (session) { + session.on('exec', function(accept, reject) { + var stream = accept(); + if (stream) { + stream.exit(0); + stream.end(); + } + }).on('pty', function(accept, reject) { + accept && accept(); + }); + } + }); + }); + }); + }, + what: 'Authenticate with a DSA key' + }, + { run: function() { + if (semver.lt(process.version, '5.2.0')) + return next(); + var what = this.what; + var server; + + server = setup( + this, + { privateKeyPath: CLIENT_KEY_ECDSA_PATH }, + { hostKeys: [HOST_KEY_RSA] } + ); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + if (ctx.method === 'none') + return ctx.reject(); + assert(ctx.method === 'publickey', + makeMsg(what, 'Unexpected auth method: ' + ctx.method)); + assert(ctx.username === USER, 
+ makeMsg(what, 'Unexpected username: ' + ctx.username)); + assert(ctx.key.algo === 'ecdsa-sha2-nistp256', + makeMsg(what, 'Unexpected key algo: ' + ctx.key.algo)); + assert.deepEqual(CLIENT_KEY_ECDSA_PUB.public, + ctx.key.data, + makeMsg(what, 'Public key mismatch')); + if (ctx.signature) { + var verifier = crypto.createVerify('sha256'); + var pem = CLIENT_KEY_ECDSA_PUB.publicOrig; + verifier.update(ctx.blob); + assert(verifier.verify(pem, ctx.signature), + makeMsg(what, 'Could not verify PK signature')); + ctx.accept(); + } else + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + if (session) { + session.on('exec', function(accept, reject) { + var stream = accept(); + if (stream) { + stream.exit(0); + stream.end(); + } + }).on('pty', function(accept, reject) { + accept && accept(); + }); + } + }); + }); + }); + }, + what: 'Authenticate with a ECDSA key' + }, + { run: function() { + var server; + + server = setup( + this, + { privateKeyPath: CLIENT_KEY_RSA_PATH }, + { hostKeys: [HOST_KEY_DSA] } + ); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + if (session) { + session.on('exec', function(accept, reject) { + var stream = accept(); + if (stream) { + stream.exit(0); + stream.end(); + } + }).on('pty', function(accept, reject) { + accept && accept(); + }); + } + }); + }); + }); + }, + what: 'Server with DSA host key' + }, + { run: function() { + if (semver.lt(process.version, '5.2.0')) + return next(); + var server; + + server = setup( + this, + { privateKeyPath: CLIENT_KEY_RSA_PATH }, + { hostKeys: [HOST_KEY_ECDSA] } + ); + + server.on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + if 
(session) { + session.on('exec', function(accept, reject) { + var stream = accept(); + if (stream) { + stream.exit(0); + stream.end(); + } + }).on('pty', function(accept, reject) { + accept && accept(); + }); + } + }); + }); + }); + }, + what: 'Server with ECDSA host key' + }, + { run: function() { + var server; + var what = this.what; + + server = setup( + this, + { privateKeyPath: CLIENT_KEY_RSA_PATH }, + { hostKeys: [HOST_KEY_RSA] } + ); + + server.on('_child', function(childProc) { + childProc.stderr.once('data', function(data) { + childProc.stdin.end(); + }); + childProc.stdin.write('ping'); + }).on('connection', function(conn) { + conn.on('authentication', function(ctx) { + ctx.accept(); + }).on('ready', function() { + conn.on('session', function(accept, reject) { + var session = accept(); + assert(session, makeMsg(what, 'Missing session')); + session.on('exec', function(accept, reject) { + var stream = accept(); + assert(stream, makeMsg(what, 'Missing exec stream')); + stream.stdin.on('data', function(data) { + stream.stdout.write('pong on stdout'); + stream.stderr.write('pong on stderr'); + }).on('end', function() { + stream.stdout.write('pong on stdout'); + stream.stderr.write('pong on stderr'); + stream.exit(0); + stream.close(); + }); + }).on('pty', function(accept, reject) { + accept && accept(); + }); + }); + }); + }); + }, + what: 'Server closes stdin too early' + }, +]; + +function setup(self, clientcfg, servercfg) { + self.state = { + serverReady: false, + clientClose: false, + serverClose: false + }; + + var client; + var server = new Server(servercfg); + + server.on('error', onError) + .on('connection', function(conn) { + conn.on('error', onError) + .on('ready', onReady); + server.close(); + }) + .on('close', onClose); + + function onError(err) { + var which = (arguments.length >= 3 ? 
'client' : 'server'); + assert(false, makeMsg(self.what, 'Unexpected ' + which + ' error: ' + err)); + } + function onReady() { + assert(!self.state.serverReady, + makeMsg(self.what, 'Received multiple ready events for server')); + self.state.serverReady = true; + self.onReady && self.onReady(); + } + function onClose() { + if (arguments.length >= 3) { + assert(!self.state.clientClose, + makeMsg(self.what, 'Received multiple close events for client')); + self.state.clientClose = true; + } else { + assert(!self.state.serverClose, + makeMsg(self.what, 'Received multiple close events for server')); + self.state.serverClose = true; + } + if (self.state.clientClose && self.state.serverClose) + next(); + } + + process.nextTick(function() { + server.listen(0, 'localhost', function() { + var cmd = 'ssh'; + var args = ['-o', 'UserKnownHostsFile=/dev/null', + '-o', 'StrictHostKeyChecking=no', + '-o', 'CheckHostIP=no', + '-o', 'ConnectTimeout=3', + '-o', 'GlobalKnownHostsFile=/dev/null', + '-o', 'GSSAPIAuthentication=no', + '-o', 'IdentitiesOnly=yes', + '-o', 'BatchMode=yes', + '-o', 'VerifyHostKeyDNS=no', + + '-vvvvvv', + '-T', + '-o', 'KbdInteractiveAuthentication=no', + '-o', 'HostbasedAuthentication=no', + '-o', 'PasswordAuthentication=no', + '-o', 'PubkeyAuthentication=yes', + '-o', 'PreferredAuthentications=publickey']; + if (clientcfg.privateKeyPath) + args.push('-o', 'IdentityFile=' + clientcfg.privateKeyPath); + if (!/^[0-6]\./.test(opensshVer)) { + // OpenSSH 7.0+ disables DSS/DSA host (and user) key support by + // default, so we explicitly enable it here + args.push('-o', 'HostKeyAlgorithms=+ssh-dss'); + } + args.push('-p', server.address().port.toString(), + '-l', USER, + 'localhost', + 'uptime'); + + client = spawn(cmd, args); + server.emit('_child', client); + if (DEBUG) { + client.stdout.pipe(process.stdout); + client.stderr.pipe(process.stderr); + } else { + client.stdout.resume(); + client.stderr.resume(); + } + client.on('error', function(err) { + 
onError(err, null, null); + }).on('exit', function(code) { + clearTimeout(client.timer); + if (code !== 0) + return onError(new Error('Non-zero exit code ' + code), null, null); + onClose(null, null, null); + }); + + client.timer = setTimeout(function() { + assert(false, makeMsg(self.what, 'Client timeout')); + }, CLIENT_TIMEOUT); + }); + }); + return server; +} + +function next() { + if (Array.isArray(process._events.exit)) + process._events.exit = process._events.exit[1]; + if (++t === tests.length) + return; + + var v = tests[t]; + v.run.call(v); +} + +function makeMsg(what, msg) { + return '[' + group + what + ']: ' + msg; +} + +process.once('uncaughtException', function(err) { + if (t > -1 && !/(?:^|\n)AssertionError: /i.test(''+err)) + console.log(makeMsg(tests[t].what, 'Unexpected Exception:')); + throw err; +}); +process.once('exit', function() { + assert(t === tests.length, + makeMsg('_exit', + 'Only finished ' + t + '/' + tests.length + ' tests')); +}); + + +// Get OpenSSH client version first +exec('ssh -V', function(err, stdout, stderr) { + if (err) { + console.log('OpenSSH client is required for these tests'); + process.exitCode = 5; + return; + } + var re = /^OpenSSH_([\d\.]+)/; + var m = re.exec(stdout.toString()); + if (!m || !m[1]) { + m = re.exec(stderr.toString()); + if (!m || !m[1]) { + console.log('OpenSSH client is required for these tests'); + process.exitCode = 5; + return; + } + } + opensshVer = m[1]; + next(); +}); diff --git a/reverse_engineering/node_modules/ssh2/test/test.js b/reverse_engineering/node_modules/ssh2/test/test.js new file mode 100644 index 0000000..4a91765 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/test/test.js @@ -0,0 +1,22 @@ +var spawn = require('child_process').spawn, + join = require('path').join; + +var files = require('fs').readdirSync(__dirname).filter(function(f) { + return (f.substr(0, 5) === 'test-'); + }).map(function(f) { + return join(__dirname, f); + }), + f = -1; + +function next() { + if 
(++f < files.length) { + spawn(process.argv[0], [ files[f] ], { stdio: 'inherit' }) + .on('exit', function(code) { + if (code === 0) + process.nextTick(next); + else + process.exit(code); + }); + } +} +next(); diff --git a/reverse_engineering/node_modules/ssh2/util/build_pagent.bat b/reverse_engineering/node_modules/ssh2/util/build_pagent.bat new file mode 100644 index 0000000..9f5aaf8 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/util/build_pagent.bat @@ -0,0 +1,2 @@ +@cl /Ox pagent.c User32.lib +@del /Q *.obj \ No newline at end of file diff --git a/reverse_engineering/node_modules/ssh2/util/pagent.c b/reverse_engineering/node_modules/ssh2/util/pagent.c new file mode 100644 index 0000000..e900491 --- /dev/null +++ b/reverse_engineering/node_modules/ssh2/util/pagent.c @@ -0,0 +1,88 @@ +#include +#include +#include + +#define AGENT_COPYDATA_ID 0x804e50ba +#define AGENT_MAX_MSGLEN 8192 + +#define GET_32BIT_MSB_FIRST(cp) \ + (((unsigned long)(unsigned char)(cp)[0] << 24) | \ + ((unsigned long)(unsigned char)(cp)[1] << 16) | \ + ((unsigned long)(unsigned char)(cp)[2] << 8) | \ + ((unsigned long)(unsigned char)(cp)[3])) + +#define GET_32BIT(cp) GET_32BIT_MSB_FIRST(cp) + +#define RET_ERR_BADARGS 10 +#define RET_ERR_UNAVAILABLE 11 +#define RET_ERR_NOMAP 12 +#define RET_ERR_BINSTDIN 13 +#define RET_ERR_BINSTDOUT 14 +#define RET_ERR_BADLEN 15 + +#define RET_NORESPONSE 1 +#define RET_RESPONSE 0 + +int main (int argc, const char* argv[]) { + HWND hwnd; + char *mapname; + HANDLE filemap; + unsigned char *p, *ret; + int id, retlen, inlen, n, rmode, r = RET_NORESPONSE; + COPYDATASTRUCT cds; + void *in; + + if (argc < 2) + return RET_ERR_BADARGS; + + hwnd = FindWindow("Pageant", "Pageant"); + if (!hwnd) + return RET_ERR_UNAVAILABLE; + + rmode = _setmode(_fileno(stdin), _O_BINARY); + if (rmode == -1) + return RET_ERR_BINSTDIN; + + rmode = _setmode(_fileno(stdout), _O_BINARY); + if (rmode == -1) + return RET_ERR_BINSTDOUT; + + inlen = atoi(argv[1]); + in = 
malloc(inlen); + n = fread(in, 1, inlen, stdin); + if (n != inlen) { + free(in); + return RET_ERR_BADLEN; + } + + mapname = malloc(32); + n = sprintf(mapname, "PageantRequest%08x", (unsigned)GetCurrentThreadId()); + + filemap = CreateFileMapping(INVALID_HANDLE_VALUE, NULL, PAGE_READWRITE, + 0, AGENT_MAX_MSGLEN, mapname); + if (filemap == NULL || filemap == INVALID_HANDLE_VALUE) { + free(in); + free(mapname); + return RET_ERR_NOMAP; + } + + p = MapViewOfFile(filemap, FILE_MAP_WRITE, 0, 0, 0); + memcpy(p, in, inlen); + cds.dwData = AGENT_COPYDATA_ID; + cds.cbData = 1 + n; + cds.lpData = mapname; + + id = SendMessage(hwnd, WM_COPYDATA, (WPARAM) NULL, (LPARAM) &cds); + if (id > 0) { + r = RET_RESPONSE; + retlen = 4 + GET_32BIT(p); + fwrite(p, 1, retlen, stdout); + } + + free(in); + free(mapname); + UnmapViewOfFile(p); + CloseHandle(filemap); + + return r; +} diff --git a/reverse_engineering/node_modules/ssh2/util/pagent.exe b/reverse_engineering/node_modules/ssh2/util/pagent.exe new file mode 100644 index 0000000000000000000000000000000000000000..6e8a71ca581ecd0f3ad621efffc366f9f1edf9da GIT binary patch literal 50688 zcmeFaeSB0!mOp&^B}pggguk{jJ)UwrU@oNd;wqMz&KH}TJbU(hD;J5=04w3w>t@r zv(G+%JzYv*Pe`jfrRMED-jMP^q>3z-2BwjC+G6?yH4K8<6BSGaCh)z z0o_W}-<>m3TZoz6vwWJ6_2r;*su~Ty3y6|nMu-Eik1tCGO zP{gP!P?$Oi|7?P{fdf)L%FNwSha!cH{}hC|-1iTD`UGKi(mzNW7ya0C-x-VTf{<;Z zHV$gcd!=;yCJ8~u7W@!iA$&2s;p6{u(E2Ou*4`%FCJ5^;Mij~va`83d8~4wPs;@M` zgq~SQXvIfmK8tVMKQB_Qth@d0dwvLda3Y}(Upl@xF~Lj!yn=8&_xzuq|IhS*aw1#V zoSS3qu;&t0)hF-8{1B9b*=<3g)hh%Y_%SkFKl2J|ajqlWDktwv9R&yp0=qg{5d16w zzsf2wUz*zgh=IJmZw9?2(U2LG2pOfuZWUXQ&APmYjc>$VeDF$ zk(sgGEBJ#hG^v8UXC-K7MNr5Qgj%82j?X|FXQ8zK(^&W~Vq=tW`qmH|iIM?8|5=wC znNEUOmYaXGHWa923=IR;If%(ck&z|2IpqMn!FzK@ac=%A{-q(5J6m0qTL2W$li}7a z*NeMt`{?H+{rnaAXr2j5yPX}OPC~Z@YAuzl1%sqXxwDjm zt^j)kO{icG@`gAnjZ6oy0&;dj4h2-NT*G@I!0zIJx0`^7-b<5|r)kD=?Syi|SrMqz z>YZUjnyS>h1nGQD&Ik|4=d<5jAP618B!IKU@VZ;qtX_GjO)GPR4@sXXore3}!NEeG 
zYvYIFi}kL-5~maRsb#}zeZTf+$)S~GslFjSslzu!>qZzI`5KDSmO2|gbpTSjN-MF4 zTcw#|(~A^;*3(sI#ef^~uyHN>8HV|?Eq2R1^l9FoE%n3tO`htZx|>jSYJfcnq^dxV z^-BE^a!SeWceS=bc_{A;We>3R81^bZ`*kKCX3(Q_oA!>UW205;w3H3!eeCI|^K13} z`emALSlllJ{A?1)wW^9BFEf0@!2L!bA8y@B=mS|Hd`t{4FbqTUxqAOLVi?L&XO7m9 z*M>fjoxPvOV|@L(Uh51B|KSzb{>kXGfkN3SU05i)q-=Lnb zvZ!y_`-o{X0UrMo=%)d=5{W)DuvBO!aj~d@g?l zYK-{0{T;p@RI|0icj9`~#L}{`i#sh#Q2$UKD9P+b#)wj7YM9Et9BAmy412E zwR#w&d4W=&AxIhFL!is_v3C3xx#M&xXN3nO=O|rfY`9Q4Ye>$DTA`f%(^Z@j01zDm z*tiRXn-M46`o2-B?I)`3k5la*(fk;YRBY00k=XQC>i!tbdg_P8pZ)<6mBzJ1zVZ;; zcx8-!E%ifre@4e$RzIxO_w|MW<*DwkTcypxhd3?hgSNc(OfIW(}4%9vE3bReE(ocwP*IIl-d0m(JhV|S4yX+Ex zQ0&c0{jeZiq?HXBzM&=y5w?LT>gy+hz68Z1tGCgNap@;O^EpKGZvfmqpvpZ&@mGP~rOONDEcpsvx`@&PRQUTQ z=FGFEHu6c3beE;_mzG;{Mi#FGDE%=upF1!HcEV4;K;A^>D$MV4deXE`}mJ+%!6gIFi$Vq#tit4t(t|_fEgjV97 zQFV1&tt;py(s(fg6UOH(;t0(y;`m%qWTFGdj@bYQNM*4r02*7Uj!Z;nbQ-~kN z8e5(0pgRPf@VeGUy)@VCrBh}vX-VS#;(WW(ZIRxq zV2#&eD3sfW1uba+u@+AL6>K}Gx~d8kri-j)v_Pe-$A~YzG;UaW*@eKP4OmsME-K2T zJFKV}|4bI-Svhg1nvKASdR^6RduO(Kg|;2tC%i(SHe)q_(REg|CDGGfpQN?7P)aR0 zq;IIus+NPt`N}!1)*evjP#XL2oP>nh3s$VgM)We3t0(Lrc-3iDqB+U|d&B8x4|s*m zU)7GR21EfMDzyXX7|m{W%o*j&>CrC}HG2}R77kClggsfds#hcX^$n*VLx#<7uW`4o zZezd0E*>iK(qxem*a_fZ%%Qqi@)>lP+4um+lef>)S9ejvNE7Pbc!BnIUT5@lf)aI= zv%>{~(6H1=qhF<`DD61`BQqCMDZc)r_mrWG`bkQw%g=rZV315hsR@t%^ErtdGPA@c z2y*b2d!lvIi+F`u4YxXtt%i{oMMHT(n}2}ns@u%GGf`bUZ<&LNJe^TLF_woTS@!qf zZ_+=^!Ai8ysg=33dWRO(j@sHiBO4r^A@TmFz$G-CMze&vY^5j!dK9FIl>y~TQTamj zjI3TGC-hp-p1}?*QG%Qi?X@bO*_6|cy30;IW8^;iRJK>pI$ohfrPp5PRJv`pP9rz` z(iH5Txsjbn-VS#*xd#!~hc5$xi;p%ZHYGKeSeh1DnoF!GwrP=7X}2jIyznLvV-T4oB<}G+nUu*@oi1VNj#@;yZ}1i^}#n zR;+_5o8`}Wi`!>D)eG}K}YevF9&xKoIbDKl~EDC-60w*e*WWw8*A>&3o?~-h00rS2W=kgBcsE zudT{Ad=#3-L2e%hhZdlPu`&DFrhG#qE8ZW#zA!Ea%I*UWL-+(B=D_E|mxV6}pBo?g zIvahR4L1vJ7TgTD8E{>2U2vUnop2p+9dPY%?dOh-*+)xI-ZFe^@ol2A<6~1|Ll2A_ z3m!&m$BhHp1{gE$oUt>R$08@z7%SG8FD}O#gT4Ez%i=roeBx^`lWQ}Ax6-C^OFHoz zg>zbudj&Z)fMXKYlYC6@y=AuuLQNr~(u)>+Z)5x3!Oj=$AZmg$J5L)bL7n=?rhjOrq1jQc1O3EXWnUx}HE$rhN 
zaIqFAVH3C%-W@ZMvt<^3{KlLdIL4f8D$CPHOpOzqd!gJTC;Hh&K(k-sN54|@ozNUw zvRZcBU(UXojZ&~i-_A7OaeyoJJp#d36MQ{>Vp|*ddUj(Pb~V;K4=mrK*b4cWl+&^o zU{g*ThU#-_CC;j6?*}K|h@H^fZ|OWl`}9@l(r3L=8oQGxU4$vEe_r7}_N+ykq^`C< z>k=D(ia2elL+Kt-)726xOzHkc>9oQR2(bIl(Lqw4#oil2>soU>eUg)PaKG2nC#Qjn zWb;Af#AYQ0m_8p~@aJ@QcMs?3SsOV4s~VlEm37 z6^wG3(wT!=fwz92R)&4Rkftc7N2EzwROz0s9gUtzh;}F1?2i(sr6=t5zU~zUy!IR^ zqt;qcQO+JfA6}PjVZJDkDAzuWs6Cb%piAQgW=HbnH8h83u4& zvvPIp(6qM)hiulgKoE93+l~qQI#9&!L|ba5+fc)^{~MjZuYmvNrP<|PHW)l%%;eE z{Dw|r_JvMczbUsCK-LrS_51Z3#r;t~ zdxwY&!iwv(2-TT)*jDDkdBD>tKJW|*25h((@fChn0Z2?qP=wg{uw4)q8XOUQO0A{B z1oE@1#=-m;Fz!~rzcwLK)?Z$0iA1QNa=)>Q`XrXy$Sk1t_Js$;#v){>6(YQ~{s7^q zg{e`RfUYw#X9JLWJOyAHuUd-`wb-I+m}sYP7j=`kZ2+8xvX9y?h>dT75J$@d4&w@K z_w}cpQ}AzjMk>T2Zrh1~5vKADUV5*BlIB_@`&_Ghp}5EGKBSDGr=v=nRpH23)m_v+ zH9|GV0B<&U9BT#HRMmKRPm1BiR1YuI8pBh%tTt_hU*+lS=jQ-tb3CYhCN>b$6!QAS z`%mM<*y}*Lmfk$k#Ik9bI9p3si^YQ8Z$M4%LqwVdM4CGg4DBPD5mY`9V)$8FP&lO( zmVvgu1%!6kJ&?xP!x1`DCEom4ra0saT zKX1&jd8akQeg(!?=*D<8T(<7oK);n}_1s^7w+_0lnXUH=5l$V-$6(7^VRo zhjb~@lxJu{0Cy8}B3Z(HH6g!H+%wVkiS2C@e+BqoVA(~ZTAQ}AM$OWAvuy2_T~vhm zTcf&c?G5`VVuV_@_y1|j?!>(LZp&_?xbZDpL@m3KLz;7)&v62=;zUD-oZWU7kq(!pKSEyipJm(7VUWtx4eJVVnAt02vZbIVuw9wNwh zEBiQ%bVY5#>OwvRAxZo#)(vw@XD`F6=Iirg({mq!Rr=NO)rJ;xuOw=m3_}GM$aZ(D zp3Cm#`N1;@>^^vv94=>cY8;U~zErH#%7)jt-&)PKfpI!S0)o(gcm?(#z+*@JDU4P* z@(jQ<4z5!?HXD3OZcF>94TtvRc*84=I`&)1*dJ^Xw4jXHbQK_*D5SVIh zalsK)1A%>VfXzYwROwd**k%v~?NGB+~2laN&a(QBVmNGDPe<3k#&x8h9e7A_dTl zLhz$iRql7MSC`+(=B$G3f)LXgYjip!6jkQ*T*>}|g9-7bI92Tjfwu-~ zt?YG^LUGDkh1@7)1)SAp=NfaUYmLlhyj;mvZFEDFJ1U2zliZ6y=S^aF@Q-yM0FmtI z$bjFozqR)O4r~(_D5tNeI~ghci*kCxMl9GXAh5B&^(xY;+Y*YAuQt8P(>bN};;7Q< zsBUAwx{}UNa^l{cgI)o09!DG77LUp`qnu_v0rnZPL&s3AI@ta2R%w0gRk&(C<^dP; zjQ#{eSKxqa(5&K0HTCXXR|H>PH&kYvYOeMTyI=RKw=)Rlp-YO2s(BRyOS7}i!|4P$EA2}n()HINTP>?^e; z4`f7&AAlmMPZLPRbx6w4K4KY=c%f2DxOJ0v>mtoJq;;WI`R!1>AxODe;<+fikJBWf z5a1|W#|~21*Kf8C6SJRMs~7KN#w?U-mX=z6egV`=!CnvG17XU61+96Lu*BeNjgz*_mcd+ePP1F)QVh6nIta64@ 
zy3UG9EP9IOK*?S^CAP1wK42FH7C609qH-TpXt~B>`=(TFLX#ED_U=~Q3DJJJpWVq< zKrEtIDHGd_M3b`1vETG*AG2>T=LZ%!y)?eVQi`(GA;@;iH2ntl0aCDm=`+#DWAf`XpZt!+Xik*EKav%b~fBsbRiAhH2ST!?Yr@ zyD(tb*z#C5Dvlj#Upos1d*4nZ3sQ;}egPik)^*1>z)pQH9z}DVo(PL zytyGUd@p*%J>VG;8}ERx`F!^wbr)aickyL^*Gs^s>CtcS+xV{7SO|<3srzVMFXUk2 zwo4EdxxRbuijAVU|A4qZ=tPw7WX$s*CAM&6dkfxso|%P4@cmi9tQsdvEv~R zAS1NqqeCBoqweCJwTpM6xxeh9?Lyr}-PijFvY`%z$&;cwtDAx!Ds>qnEUz1PAYI&--^OKG#4Bde_(R-YtXzzj#u z9|g4!^2lB?6Vdp9-_@ET?tddv>^Nc)K<`)B>D67-vfe+#OT}JtZn2a`#kL|j)(5>W z!LRW>aD0Khl*f4~i>2kEGeN01bmnd;D|BX~7;Zqk*&YGD-Z}!Cf44koQ7jTF4s*DA zE0JXOj;r^^QGZVFeE6G_=3XmL3vIQ`ma`y1kL5)V#qv%8%v?8zxWj}a_fD~~m)v{A#!tzW#m0}x-6S?1C3l6`cnGe#i|9dlhQ=0~IL#}+xch*g zoxx5AX0JI+WLE`cZ-i!BgcBV5FU*(bq!YWNBK7V?1>BXlV3;KP?lwAF&(AYT>o(S2h0F+`3VDiy)8WI1IlN#j!j!aF(P zW$I)QjtS^OIpZjkFc2G`2dD1&L{8l?8*G6ZrbvzdcJMWa8qv!SE)3mHc0y?DeR+^W zU5c`)&5zNMa1#gY z;BXXaW;C(ty$Tg+w$W(-aj*xbQJ%M@*=F;Ig2mS$r=T`S8T?>6t`VdPk1Yg$CMGw3 zw7q_|&;yF;?L@RPv`&187!9%UIDVC(+r@3WdHfyXgR_a*8@f}vR~h=TG*cP6M-2ZP z(gye_g-o=8gdp7(8x6EwU#_`Fab~-5!Xs(abx;wS*>WUjjTWXQvqnvX7?gX}#IcU) zjZz2mHOFgKffW-+eVRdoQ6WUA{u-!iHm$<0v^jd;MnIkrI&+8oLzA2k7WB$OPFhRxiiA_m^FU^=s2Cv2>K#23Gs0hyiIrN;l;2?|-jU(EV)GpYoHGGbi zL6f9#M4=#z6<2A@DUHEaRfYD^6u$$OF2}|L6psPSze1i68w|7#)1HWjCJxk<{LNU)Y0;9ek6%6tL$EHkL>9{yIQ5a{zmJd-#16mERQp z08R(r!a)Q=?-IaZw2(mz-%KU9*$1LIQkK~r15t1|;%f_sbvwkAyTaff#~K%#1=}$uf7FI#tNU22@9Lf7=->i{uf2nO89J9H zw2ss^Iq^b{bVNR7ZY2g^Ofl7eP6_PDzCjHh8X%m<(bu8e1`rL2rEf@J5-o2RT}1rjsLb=^1b@}#WjdeQd+H^ zx8+lLd__7sq|f9_)!5WEmnuDJaR~D21TSF!X2O~^Df?cLmH`L9#yeKrljyHaB$;(2ydCBHZ-io-d2y2Ei>{ob zMJO64ok=DTN8%95hiQ)-)u$@_ux{uX1~1RVQaj9kLm+0Ggw0pR@PwX+5C1r9b8cj% ze~&FKB{<0%oF!!c-G$~n!{e$n&JZ>yHSrC*u@y4A$WwsUIDhOOXg<>0fiC(UF4tTw z>3qK0{nkph1QYa78xD3axxK>gul5R`<6CfzSBT(y4BsdC#MyNC57JbGeNpDESi9VZ zuqHnmZ6U)aEgCx)n?j^awiAbo7_VIcZ|@l*hZ5(rirDxKyw7^Y@R#ITrBqXW*4u@k z(rJO5%GBIJ5EaP92BRn7xT7W-8?uZIjsW}QIJ(o%?!azO(ls*v6dN_lOS+Q#;Q{l0 zzg!+*FA*g1Q%(4CsnP9spy)$_T2mD-Y+MS 
zp-B$7h%H&PE6bVG42P8doPZrhPUK1Voh^;l`AqIE>VTHG5tR)Bo9wl_#?(kOG?j4ktzkH^8f3s~tEBX@U!eHa67gFH!voes1~Qs!~|8BrO(nt+uH*R?4??p^v}(U zp?OS^>bNQL-A0)!R6im#rD|eip(Q2~X)jJsNRj-S*t{ft1_Apx@cbr+CVI^r1d1US z06HT@p20ycAyC+n!o)cv4hk?ZGz4}DtnVUajD+fbEym?E^wcZ0y0h30$pWV+Lx*YLgQ3q{s;&hh4XYa?WbieGrk@ z+SF>uUiv*CygDH4CAquQ%sctgc#@@CP?$i$Su)cDrJP~jTGtSc3Ya3 z)LdpXElEt!XLzdnaYNS<9 zXtn@(s6Bjr=mrZKDTa@r71V>BaBpl*Z9KFoabfQR2tao#?mnc8%8-5QL{wgvextZM z)f;-b09>jdvF3^5?l;8!Dg|Nlo5n6ULBx1Dvg-f5z|$$)ZC$!o69?oZLK76PuQ$&seAo+2nZWl7)F)`gwz0k3-S~rM!ccPU9_u$T+XM?;Niu=MoODL0gTI6V2&7}iC!2); z|2P3%Km;@yb6QHb43Q zy|u(!4*f_B-$I_-PYAuM;DJdvnN>iu)cGF}H_is^sAlQ&UURwbggdre??zXhqgi@2 z2DW1qj1|NCf3I2EOKUQ;byHfsumY81#oWS+nS@&OJryhkYvlLqo*sshaa8v-W328g z%yPa@_w)-SnYyQ&so2r&oGx*YPVP2nM+AuXK7vTBcG7BCB>x0Htz9qWYvCl^P^g;# zyKJoJ4mSBCUccUfDPgYGbO-=O9QF@@U7yY(RMJINszW=e|E_k@in!n$cVL{_l3aWE zUFlADYjbg~qp?+<)Nm$*BI?rPR*?z4Rj|gd)J`jB>~&WUeyn_Ilbm@+o07=fu-76F zXr0k+hb>Bu3(gy`lZ5e`lz8fS6r5theMNAnU=3XvvYTxV9*6AA$Xv&lZ7DPOCW6pV z+--&v&^+Tv%m;z8;#PZ?N(lP6;#!+16@t~hij4~5NJ0b8Eq{%^Y z*~2-g5a)vwjt@muvC2GHz0E}%g1Pu%I|yCC$jbtiLDwyaS#=F=*<~Ybe-3`Cv>{LX zdQnS4F;?S<(qZ?E)SXm{UZQ%|td^z($;W2KYQpG}xsH)ZvF=u=0CR$jCQH(n7+eRt zh`NB(qgbj76H37;ijBnRwG_kK>~sB^41rJ&9v)|L zUZmCNpfg5oh_z7O$K$akFT_+c3>_s!uz!vPc}c7nHB%-GvQTB-M|cq=@Fq`nw?et% zalXFk-@FimdzGO{bzjpUYWCMRBPv#J!--)OGEO>AlnFMP#AMo#`&gB_&yG4hXT`?5 z0a$BeOWZW-Ll&_y$&Bz&1c)w8DcX6!D3DY_XG?hj{)O=^{=2OCj9MS%S%o=&$z*)^_xCa5Dc7E>8+lkFyyl)1-wd@R3C)Q61DdEt@_V9d=_51xEF- zxORhuk5-oi%;oAfPA8NN2O7GL6DjLq7A$PAnSzlgO&T>HTSqSY4K$^(1HfW^#Z)IU`$gplcg5TV&Z*Yr>vYv|OX_b*J7Uks^fVO4J`4L|agwhs8b4rP@*DGrQyv_a9bz^YzIMpW0BV zlpneASE19B5xdc*^yVnhL!PpX^%nqdN1<=HZl;{zDI3B)w6ZKaUsEpDd|5X=>yUH! 
z3SxtPjJiu-sVyHCUtFGHI|{oAYJT&6klog2?edgoEpn04ZIzN6aE}WIjE%`?Xp77oo+Aki_SG?*lv`~==+@~W zLs~_O#!4&# z&B-|3OJxI5hn$KoYeJU+iz00hasaIZvJSf2HQTh_UxZt%p0k@DbRW}BQ4ze`NMmk5Y=gQJLrd);{oeKxdjpQ{3AF<& z^pwa#gUdf!|AsR8f;Xf!*L5o#-(qx|kWh_+A+Qe-^_zXVgMrH`e$4Cv7p%0iV8t#C)j|D0{hc~bEq|u11uh8f^ zk9P7q0uuA)*0svqd_&5Qua;Bu4oj}Zl4G$vv7)wLcU08=O}ACl4j7pNYSWSwXClrB zdM0c8Q~VIX4#TTv0R@;b#P(0DtLQ7-415u3N{+xD83hMibsMZD93D*Nm70IMn?oJ#icHp+zTbdoFD=;>xbp{>JgY$|2obDCTuyXQB`6e3vI%2Rmq-*vleQ>%N>^Kly~Y;_O_^+;2B+3_o1oT?(l1w=x* z)GxTwwiGzDfoSR-sKkd^I=02c)#9wq6JT+qfTL?b2fK!K&uXJR^R|?4q zE+%*Ldte9FtZq|sIUpvXeo`|kw5fziU_WJsjUp!hLXXex!wVAUelNxl5msbS@>7tc)-C%W^;rKr{P|1hyJ75EEf6}NjU$Ul||O~F->VF z&6ZZ(4=P?keMD1bnw-b`0&AJMV#?=NvL#VuWq%mWgpt)#)p>eSCA*rEzT5Lx0M|B@ zdKp%*vAGe5m@QN0;iTc6%gt->N$;UusEg>xI?$%NA(1eBpZZCGZvdLwWb2V>9QBK0oCrg<{ z7m|}+bP+j8xI_H7MNVLcA)o{|3!AB6(=sBfQWP35FTk3iX)y0hTtm#j(p9i5)1sAy zE(xMCXXNGWz#K0@eA5u+zUX>8l3<1dLQc~Bj_|wudd=SerJPBCiR7&g6z=Wq;Spd! zyWoMnT7|ayR(0vE(9P4eyf8_UYICc!EvuBZg%0Qooeig{{LO#S1Z~UOq6hI{j-1?4 zvQ|N)zNH9hX1IaEKQ8)NJlsg()kQy#hr<+JS`>+g@2Bv*qKD#PlB?=h7CjsfH&b|8 z(JpJOd}I{#^rDw24CTjKAR-tW*48?VcdOJbYhh!I29{G`^{p%UK8PE$MK9p|E}*aL z&c{hgn?qOFa2pBd1KKLNn0juJWLXqo-&}xc7LgJy&54lud{t{-RLQP@f`XP@wTLdT zVJSImmQbO)18hGsgyt1hMOIj(i*P%J8K*H*s#FqS%n#r+XPJqfr-P)t2ppB|R0xx+ z)8HtuNB3bJnnBnI%B4a_!c#yKgNy;H!pkRZ#vH1U-NT{COKJ$cCw{J+`9b_3T@DF+ zg}XJNCTV}wmODF0{wfsu98xj~41Er}IuT5CXx>FK?)!kJ|NTX{H&lWseXfy&N#ww6 zu)^Hg**p!W7=H;HxfI=i7ECmfNIC)=wK8XOqTzEkSxQg=s%tzZT|=)j$j^n&=HQ;y z*<|@wD9PS_y&=$X0L_F!5YT8&oSTE2G|6&eiGFIS{y9%Lf&>!i0YPvlgrLi?xQ{J# zAJVNQ3qtc&Nv8?hOGs%noj?BIjFdosaFJ}+`t&q+E0$P1dVv8)qbqwNy`I1DBZn zU@I|_khI)c(n}Xskv*y1Do~G623K!7BGHo7E74~*@K}Z={)Px1rXd!va4L}?D6lUh zE==h9kc0_MQ;&Z{ll}=z`WiZd4L4XAZ zO%V^h;~RE<&|+t)E>p2(Z2?9QH4ZWw(t~2B?5T*k{ZFN?Ygx zrphm4VxJ*K27Kc<)WPmN4aBrASPOo_<**xT3f&S^i&kj*<)N6{SE&^h04@*+R!MChmTdnJXKH4u329U>_D`G-&>H1wmIHY4*{I9Pp=4^lcX z14iZxR6p*i$y1CuJ$!h`SOyZ*IKWN{wy$p6LCgakM<&4<=ENu@1ea-)V(g&uC_Nn< 
z=1a(=r12Vht1vpXj$n`&hn4SwIcR!%{i$n`MN`gC4bZr`&k9!_m#A$2@!T=cs#5enSfUkA3X#{|a2(Uy}OYWxqdmSUMEV^Vlgv?dAW1N8!m8EtA)>Kfu*F*hK z$@~JSb zWR@Aa$PCRkLm6ght{F-+LtZm9w1?*_HbY;Tp;9w++zhQMqG6&+4pRiW>$lWPTza%6 zNDuy?39V=XdR=TVXv8G2WWOZfc%N6AqqK=KB92yWZOh%rdvPUOHvxm=DdL9O84ZT_ zE#!A9^C7mVWPhQwcp=k}E5J&qTvmR>Fo>M_@Rfq`#HhjVWOGfwQ8}L2=<%0iHXBO< zCRrQyMp85c4}uyijSqu+$!+PTf%`D{0uS){@L}+29^gazp;AN;kek!AXrXBUt$Yyy z;x!qih=2tCB1i5HnprKBvV&$rM*?M>e+#Wa1jX~36S5^XbIcda%xUO1J|sQtPBfOy zfPmkaL)o=CG>>71#;0lwS;H*PMAn>|Ehe(O4))L~ZX)+Xt%zoJP*_C-&D;5raik?a ztBd3Gd^x!G7(M5oOV0oYNlYPHo;8fVK8KdUXyFB&k6@aL`$);uW~GaGR7oc%vdxPt`e>v#M5+^&bdfy>W zjH~UXWIwwRd8+hx%$wVGeq1--v5DZhno!QSBu-imTOepUGlF|*Cf|S@%3*_RNV;R< zi)_~SCSr30_5oZ|<*Bu4@Ad8gHJl^D=#0Vr=O`TW!>b?VB8k49Lh!1P#jbQixXuZ+Tq(L0`@{&C zmZWJ=z)*mM97$LuxwIdaCc|{s8@G@!`dt`(p#E7625gjraMA5SCm9e)nT6FZ$yG2- zJ`Vt3ZxY;xYE)dYAbx|BQ6F1?afe|-;ff4-nhBIB?oJ0(LG>`n?1xO5y;na>U z(+>yH9bDvDkBnN=3kWKwEn8;bmh2ar1GUMRK}07>^8{9y~|}S4}mwf~moq z2z_|KtjCvxrn=cxJcBeskHAyv#?-> z5F+Coyo3gxlcN^vXDZRb^?m#Be3RtXJQnHd3RX^WAfw}acyRNaGZ}Py)vny$gTq1( z)XeMzYJk=Sm)_oiFNW3k++-~SQl>S5{RGW7WyWS4(gl8HjUI|zmWyX%axDFAnw!#7ES?iICwPnO%^$thxka9EFE>616>H`j8^r|8E#6{n+$7}G6 z0@u|om}`qR6!14|zQH42sc33VVQH55$N}xZIae0V{Gk2PxwcMm&w`AKfVk)0MEARu zxNAeNBTj+F3HNb5r|Xh2Dz=ZIcu%=rX`Klj3gah*bBZ%n&9x)JAyBq?7m{dL@GB=A zV;3NYZ~=1kwj*vrVhJapAau3@yPS8W9jn_iEK65V9Ii-Eil3*@l6{@#&5g3@gGWH) zvGsraok@OWvW>rH<9@e+Ui4|@EvLt9V&J3=AG>5}X1L;M46byM!tB!d3WH@`sbqs_ zXs_LlA>Vr*|Ft{FKY{$joojuZ^xQ_IB)ihjuKW|NPU4xgQk_RIXOhs@%2_%{`9Ee*g5Ax#`$fMyzHkvR_ zCtn!}Gl)t2#s)00J($CH9*_L6p@27dVb`|ZxA8w z?i6?1Je?aSz$RlZ#UYm0>yNm2&4?3F{T3}TLsmzC1?>0Ci;bCM#eSXwUpfDhW z`9U&fnEO}-;t>n3wsJJN5}7L4t5ay=6~_R;v5f)nTQ3~wUpY`N_1iWdRYU5n^w5&T zsjlX22~?~;fv$UJQdAB_p`fO1iPT|wQqvRMLv4LczuD}@j&Ki(`4td$AG(bDwMQu? 
z(wI(}Y)2xu z{9u}#!K@cRikSzoOMOUu>d+2;qjS+Z+{4&BF~|>1IqZ&4h|OqcR{*#T#}zAV z%FLB`V-U|vdFJqIpVwlNU4Wzi=*eZoJh=o|&wl`Z2LY*zf*OZLHh_*lchcgfKd8w) zsGyV4^2?_*b~lJZDrkFK@jA3rQ~SWYV=0xVbtI4%-GCGD3D6Ghu)f$9r_WVJzLn1# z55fSVA5%t#iIwQny!Np~{owfBS!*AYW#o(_R(h8eN1{r$7UKaEk^!~e;TeHNaT|LD z0|~2u9Y6*R!?wP+8;unkAh+`m43Yrf;Kop;$k%ifKviCvA?D5n9=v)1XU0v@iT+YM?XwHP{zebVo z!55LgX-;oEBsPKoU}aE(nR2N2IrzsFvk1kQ<_LEk%EvPYk>B>wAmPQZJiJYYNp_jI zdmdfBMVECqhmIrBQS6zD(kzQSLmR$=storHNH%!{183lEt#*e}-)%<;b-h{#aO+q| zk3Iu}!smJ<$yV zRQwjY)sc@*k1U+_@(4-`IUL@_@@Z|MolQWwC{?X+=r$F%$jvp&T&jLTOO^ibg>#5b z#PBi{49;uR`XptLPlSwM0Zjxa6dp=hx4{m9rzYd7l zNkfqXj+!O((7fovbF$nZh-DT;@hbdQh{QlpZudnta?KkxOir zOwdY#Mdm#5vseEe!0PH?g;qo}!}w^RWF1V}ze2Ww1e%=|G5jY4FhjP&?8=;i3+*ie zY!bAEjutz2oh=TycoYhRH>%Ps zC*Yb>KxXl98{U9+$SC@Kz$Z|VsAx=QPH50u-=y6j)M$+VUqcJ5KnHYGd=aJdltX*q zRp8}5R*7TOJw;78mq|hG0qQYCtS-!wZ_ys0E<@;igrLfs^$aH6Zpy_RP*^|#`2e+f z3?Ow3V9R6zFm_XU?9|hv85z8kvDM62jFP?D0|ybhf~GQF!T^Bwz?<-MMO|KppS{GR zjv$Iczu|?oBE)_HFOJgM1JwUTT{K!#3Lhh!2;Ig^thT!;iPZtHsw%Q&bZobpV>>}h z0`{pu0@AlK0xDcHjMoen*~g-(W+mR9 ztEH(RV4FtaYrW%kCH#Y``mM-Ff6yY{1KLjM4hc4Y*_- zIJYg>fV0^=ie7hjY#09YLn@%1{X24yXmvB}()bc!%pp;q(tbCdxIw@3_j}KogZlw3 z!(a*u?DGs9n-DGUtFp<6X3brL~7yd)|U>6P@Ll*HGC(y}Y6ChL&!`p!ncO& z$Qu1GFB=B&9^sgng49<}9zM#OpMXvW)TywHgOK zu+RlqaF8(r7ofy&EoFhF>|tr2YSCXdkH*T-KZ{{He|S!bjqAxhD?V@+Txv=IWZ`H^ z?@G!^Qb!UIjC+Xx=d^>8u^UJsKqukUR(U1{flkFv{561+S{EHK;r8Jn_qB-c)Y{3l z9n}}XzX~yACJ&gSy+@B7{px5}>LIy*@RZgED7-y^6xZMvwxf0F9>8sM)QKLzZb)95 zhlDB1ax>h=;B&y|9Q-3L#r{#y-q7CDKBjtBA8-jZ;y113FqYy*2J%+zfb!YaC`*!l z5FNeapM5-mS z*i|YID4&T^wsydMEc%sIa;TFdi|sc+vDIyp5}PJ3D3#BwT+ya=4IWe4TzRlHxmkM; zmi$PwEVw~F6V)jXO0wT558cwc9Ni6d#3ot}!Vr_O5=f~v?hIDHFU~UhG|wy*7A=Lt{+2j5PLAIi_3jdx88{R zoNcw%s=TOl5k3b#_~nJ{3>n#itLlQ{AcN#z%dn5{Clq(kkG|a4xJrA^d}TJW&Y>mh zS89jyLaO(_40ngX$Sr2llD_XchHZXvY(o|gMMPBV_>aG zUJowJ-3s;;<_jNUd<%WuauV!~mxJmH@qiX~gv-a*-G*W)W|o$=3EheJX0^1<@Yr~;u~C3MBzYOrjg4z7jOE=`xExgN&~RCRgje`066}>@z^aX> zor@Ms{o^=c$vC1eT8lp;(1gAjBZ8Ueg|Sg@nKc%qCv*61e%!LB`wOHBc^WR=Ul<1a 
z1cPt!{sthYjpUt;EVAReTAf_M3MV3_3*jnH*ZR`Ef&=_O*LwVs23wn_txkj}bC_+x zv4X^_@+5Hkh@Hn{gRxlYs;FW0M`rIecb^tsIoOjTu8{diO~ic>D0o{qGCl|2FHQAw z0Dr6|hrQoLrIT8ft73V64vc*=p*9Qe!NW)k5@On7_Y$36y$f?{GHC~BGJz?lb3Z+D zA0L8bY9%bo$^h8LhDhknmp_69(Q(UAycky+>qzB9?%1&qbEyJ^MJkX=6v_UGeU|QG zDMk6{YtS5%>x11-(d6j|4VtF~7l>agx*Or(`$z)MOlQ0^ga%hjfvXH2q=XhK#Q2E9 zJ9&8aX!sExE{KO)yzpVga0NG;B_dDDT*U35A6(%y2ip^s78RxutTZ1G#vJnEbu?Ty z0ar5%mJTxs8EJB)?09)CNtGe1hfN!z5(n_-VC3Y6GM6Ca2)cU*;o|Wz=`s|A^otDK zOF2P#cMw*T0&<`fhasQfiN+k>9*Tp^8vpkgayL(;Q64>+t?Q+e86N_E;s#Td1nC`? zj!c&J`}mIi+uvbHY3CxzG8e2jutw6e8h_G0tWmBLU=n`7dXwCbR(+XwRNP~!QSB)> zY}TM|+f6cVjG(v!$iV)9a^g3-{)1NvTPZwCmJz6gd9VwnP_Dw|PWil;kdUl6Ok^gq z5M|};LoP=~{}Xa>JgoW==n3LjS@rj^7o7!fq5U=Qq=`&=44F%TjDC@JGViajwNHfF zf~+H8o052wWV=z;$wHayJaDiG-v?sE5{81Hg>csTYt-T!MppE$MRbj7WhZ|-+LB(r z`_mo{0rHceAbTeskiX`7s}gKn=>w{b^JGXtyu`C?1^v7MMdWMIs* zD|jFdjg6Esb_)g`3nQ*V7ZF>uQG~I(7V5;DftF(ht_4>+qV*03$xYB?Zm8{N$LN?$ zCQ0Wt`y92_Cj1?k>eKDMGq|9i;y%<=J=E?yEi6DuxTLf$13Xm3H|#Gb^(o2$;j=!h zZ_2?O^nk0t*H7xCK2x2fXNY@z!x#IUA^KxBP_6VS(E5POi?PYXI57ZAw%8vV%E77vbgxm0=R^qxEnJ-ua7%e*1sYOS<5PQx+!BRp=&hY6Jxax(HckA z0r9^J;|;JMn$<(DIMTi0aDb84vx*hJIMybb#LSI|vA>P<`F{ZjH1`>sa!&kVGakmT zZ}9HoGz?8MaLHhONQ}|Z&*4q&(2Mb|f+7_JSk3de`2@R@*|<3nU`u}i$uC=*PJ;{b zzAphKK*&{+4N^(RaQ)UqK-Bu$gqxX3~Rl=_V?cD{eLNXe&B4lWiEQ zc``x3;x3KeV}nwbionJ|xU~h?QPj*&Z2?9+{rGK&OF-X8u>m&i2_(gK@#PLIMB^5n zR1{=R(ii`eCaLb`!z$aGtomnIU;3IIP0mDbBcGh7FccGDbjCgaYQaLC~2uxM>INhr^(i7Sa#5#kIsRQ^Ea=B=Bo?q_vq!81tdh7XQa!u+o;$ zZ)Pd|PA#V2IbQlLnv36>ZJ9ytq^;u4)B^4#%;wIFeC}K^3(gY{CEN-}^9e1>xNC1& z0T=Jqx2&Zwf8w+w;VEom^w{VQN^Dt%5bj0dHyICtb|fs}se-_H0rNf79b_3{zJ+SO zJlgUYfS@b;9az39^a{7q4L0%S7o&+v5djF8K^FEP!33%7a~>Fttth5;Z8=@C40eXl zeVB{ar3pecj+S^=^Y8V6*I>oT&`O8hoh%2E0_X!>nt$fanm_kumVr@F4m$o$9Y{(} zoV1qvq4+w2N@DMSn%vGoe*|lo`LZXVW|D4`w3>!=9sZKyEJI54;1Bd|xB^;D&G~y3 zs~)K}H~$`b&GSvYX7TSGY%!|V$g>6>eGeVsV^4&}6r1kWwHn$5dE9+=!U0=dxl_B@ zSy=9n6S%6gt)l!7>p(VoYRp3`ta_tEU1(SDaxO3Tuc)f7S_uuNaU?c{xUA+pJOS8H 
z@FxgRN%-S`7`;6<2gWgU%d^buH}7qkjzhop#^Qo3TygXq#-Zjx7tqLOB6y1e4gbN=5-iS)>5U445 z?iCi3kbPJ87gqO6FW~9Pe)Hj3x^|1&h_UQO-7r4>85qh6699AGX?#d#H*Kb=)XP+5 zxXcB!PhSp2zJNT1z9I3U78CL8@frCm3tBxc9c;ON9P-x^@}B6%Ni3Z(ANHJ;f*nbO zxzgd#FYvR?KmRWBcwo%xh7dxU4{IP)(Z}&O(ZgDNQAI^zb&hPWsL&H3J2qsP)(lmm zV{=Vm2U<|cvWT5WJ|@BgAxTx__8yuQWy9{bSni)kc}Z?4!I!#pYzBrr6RKM(DOPUQ zsI0@!{+k4N2Xnt~bt^9)qZLjloC>nXQ$h9^732^V&|f|zE@qSpvWW^n0?={mTer+@ zF#6~%o7v;X{!K6*-yQoRqSexXMAVvBAq$^QdAL;BF~f%H1Wbgu8w=kGsp*wcK67 zuH^12b_sWHWz)F3mQCVrkfn3?ZkEX1b!;R;EtJ>*cQ>)Gxx1Nt0hdtN9l}gvT@<^! zk-TqlZxeYBaPKzq?%`gNIkFeH_Yv~$;NHi``!C%41bH>?eTuvx?tPlPGWR}9-dgVE z68tsX+e)#^xc4A=7jthXdFOF2Cab{Q@N$$%2G3^j1X2yK3%Iw7ydw7=C$F7*yUBZc z8#S+oykB!KBk$+jdxE@O+{=aZZ*gxQ#U9|^e)8_&-XZe7z`eud-NC)Y__KfEUb5R1{w zm;z5iYb@qIGiI(Q;b1K04l~Bk*1(S1Nvs#%s)2+az~V{4T1ya8JuhU74?jhtV>%oN;_cv@9D@h2KnwC*&3c_hz7m-( zx$ZWLJR=1cW~0n?-2$sT8IcpNyUixMptZrDRZQu@K@#Vmw(V0|k9I4qmbSqIphW!f z%4REYK`~zf0c1Kpc)&{WPuFNjpz_~FY&(_zI?CS%0f^od$LN38KF#0_bK-CG@rIp@ zl%Rj9Wn+iI0Q0nAH0b_>e+R!YKLWAUK#0~{3tkc%vC7#4-eSA~hoN)$b7Cf|5@3}`{d=?Is&?2V(={(=C3^^?UPVVhQuPtZxPn7; zc6=7`F$2jdtddA0M__Z|ju}c4lIOg|lph1j90g;0e+B9H|KhKFJVnij8%)yQ_=F)7 znU2z#6Mx^c4aNLpTgU~d&$NXcqGHDSm%oQ7O~Ye?7tnt0_^Dwu)Hnyvl4J@8c03r5 z=MB;5_`t?ovL|)q;TEN6i%vO|8m;R6w-4w~%fw=n;<1g0r7WO_%0LC=6RC%!GEg=| zZhIE|{+8W2ME94Z^fV_)mx7uu1|{I_r0c${OT6y$jaKUy*LA?x24DMi-56}VsdUtR z=(=tx`DHB3MxAx=V=cOp{*vOG#*A)^@BjJt|IZ#M{jDJU%YO;N)A)AdYsYsS-(T@r zUlfFNd>7!m2Hy?%R^h9|w*}uL_@2hM8{fP5`taFz3PJ|HTzm!imf*Vu-#UDo@jZy| z*Z6kgYsL2=K8DW)Jf`8x$F~CCt@t+Mdl=sf_}cNki?18s*Z79;{a@`}3wRXO**>`- zsp4=!E!xm>nc3`R?z2_55QHk$ z3tp(=Z){OPK~N$h#TpA!O<^v^u_9bVZxrYhgQEm^X<||M zHXwq}B>F%+R)UN1;Kye4C}5=38(cRTWgF3(I6{7WQjI27i{7>(B>);Rm^eNwlb|Xfs#7`6# zQ8a4=krRe4Z0HU3(&V}csc|DtC1-*+llKJLrQVYUdAP#b|*J?MoICutB*ltNZO^7I3}B^5S!Bo7a ztkn+R)y|Raim3F|H_^7_T>|^8Mx69QvNv-~36rddztLZdU1auy>*|NalO?w!^&pTT zZ#xsZ>XWwp#;7p+L!;|RG;D^=k={)dKMr3(_b^%I&zD>b(r3s>)&XP 
zMiPzDf2TcXS9?*|q^bK^pcMnnB;*%@)OwE`Yb(A#Lh}^a%)g%R`84|>{(94Cf|O~NqnbxNt@Owl0MA}3CyQ7k2WHOR-Rh)#*(c$Gm~U29hr2vH2w779*C-GgJc)9X4C3z zQnu)J>!<&8{pdvl)x(_K z%{pXV-LfFtq8;(zTksw|{PvsiED3%ie7f_n!Ow@kd8wD3Na^rTqICEtQ~FK#PjmRw z;Nx{$mIwcI_)Bl}vhTt#gg+Gi?q7M?_b44cekO+PSmFhzpa*{#{FcSIr-NS&zZJd! z|6=$<;nQhb0epJb@zC{N+<(y6p!Jc)OufdUHfMzr_G4T&%#>u=WWna$RRuz| zs7cbMRv7wd-d~Nz8ZsJj1#%4D4^`WbGioz-2Ew_i@QrEVYY`@Wko#p?_`0<4qO|b!Y2n3b;TzJz^iwky&4<#$ zblO5X2R-8{{JE?QlADOn3fvRgA(v8wDv*o&^H97zrt9Qa>523Fp`;N$CoNnY3PtLu z>^NK!WY>gGx)UT9NL?c#iOSSPGhTpEN^9z3M$Q-`eTGIdfFsoZj5x5szN^9*QS>$hNaN>qxFj11cd3n&kEE0NUqqh54w*d9?6dC zFzY}r$S2Dp3$$!&0>ZTOuu@EZEgl)}%xTWXd-(=GM4Nc9+BW@gsvY9QY};zrYLm)Q zJ4A<8Ct4|K-ZXhHM=Y2YuLieNBA%93gZ2ZUK=-v)&1#fNk5@vjbhmBMtwMg!IJHeY zQG2vsp>`NEX;Yl&5${BYc%}OEcTQ4MlSDft*aqB4TP2*00|_&d}3r)S9nrgB0|YkdNg4r7AG5$aWFO^88T>0NQMnX7Dr=2Lx=uYouNOy zHkS0q%rgG45kxx-^cm<9O$J&-2MDuDe=7qG_%T+l*Rl!zFst$>lCc0X;L2k`BUIg( zjKx!FHF_Ydry|u!GgKJ~8@W}EwErxAtV`F1p-oTb< zv^g5>v0udiPVH4|g5f}u=DEwb_PCae#kkh|A^o;MFX&d(a9}GY1>>n16_|cC?jv76NKe^z-i+(zNhQGEhpw~Av27fj))D(_He;$h` zlCzpyS}(q2_MA&EVb|nG`gd9Nd-wj!rhL%|pq2`7SuIkmJDiWNBYhc{S-yY0?Jkwiy2ECOzLCjT+uV7V zUorp6s}@{+%`X;S`^)PVUBCE-CBM4yrlmLE^6O={-gf))6)W#pwffFAYkzas-Rti8 z?Y;Ni|G@eOAA0za4ZnNzv5k-a?-T7${{9b}o_hM3&0C&*ZtJ$^x9@o2#ho25?Rt6l zD|`O9_tihW*7^FM_w9e<&A0H#@-GM8eeeAbKK$t5p^rcL>*2rs{U4wH^Rv&{S6v|b zUI6;FKz!T%|F+xz%JToB1-kEiV}bs*`#K%{O3>YnZF~We%X+o`CHTSUp>>yJ_N{F^fT*E8qqU9y*}0Z z%9FFMbanB}be_B1}Iu5eDz{LFq1_VvuqQ?O@#X8S=r zA0yXS{dc21^V8eEH-Wz}1jt$SGxfD>R?qxQ{)V*t>-?K9?U|n`pR=y@vKhYx11Wb6 z!udGp@ZDpj;5p-Zgz4bo2=3_fRO~|`pCdg_ZO*RJ!-3T9tQi00%*wIvGdFF;Y-&~6 zlp_1+KnPppDXb(EiRET3mBMJn@V>Hes3z{H+2!scPpd#o&+ z&|@XBU?N!O4^?3=N}E$A=2YnZS$ePZD~;A7O-*3BA{f?-*-nERj;zx5u#BBGpavuj zR@sRIYJvg1q|qO%j#MT?iQw4QgwAd;DTU*akY3`$^Y65HWh8*Jgq@H`qamC?SxX&` z9>5v;4t5z^or@Ox&dwPhFV$<44GntC7t`Yq4SNSYtpi*JesXW#VYU7&vcZi0TQ5;53wyoT)|-j0;Pp31Eb#ZzaX&rlcz z&2!St6a#B!>}iS_rv5(U0bS$Nbfz8{AFhk()D&ZJ>Ia>$q(;YI(#ydtta58|CMWe+ 
zt51&^$1~wN-H;!w=LYIkb+B$`O7c(kwiFD*RztF_aU7ae2b=WbxFK_dSUqt-G0Pm; zaC5|zM2rzp!)mh1L&$*G9=F>Y&PIFN${y=d6-6x|f+b!qcU4GS8)8(JYNLFrLB z6(yK=Oidx)VUB#*8vgXMY|6y43TtLN!jkc=CQ;}8P<)0zMgKVt3lnE?0zDvs>%>>0 zE#KUs8BuzXrI({;J6XL_yAvp%aMT@@z?YSPWmn{%3Rt#8dmRer0`zPU$J2P4X}oV` zJZHk2RK|0ryp&JRo(2K*B#!dv*_f3_bg3@+Cjdm3%$(9r1WrnU(uV<*?gS`aoQjvE z;+SlVI#W{VRVg2nghBs&_|!H%7a%%KfDMQO^b8XVy1_sFG86IB1`yA4Q~o^o)R)Tv z%D)02I=E#u-aNZ46<-0L_*(@~dusqH_Xt3JdLE$smjTKj455&G2qYKFHD2#!(}4eU z_eE!1*%>Y;W=HuMVtbWI{QY9_ls%XV6zpwPZ^1p0(i;m@6^}kJ@eIJ^AQyhN%?eu8^sXLuMO~Cf`^Y#t4 z@E>jp^y7+I*|~YEaAPoIUhBMJDbd~eni*KJ+Vtn%-bt_eFz=;phQAi&pS)ww9X2z& z*XP{N%p`Bhf2g13i&NzvM)~b4_pdyiy|MD`l^66)|I!skjmfx?nY`zxdu(j_o}cZp z_b$J4_n$id#u$I8$r+uSm7vtnVHz-%A^ga9fZ04@ZofpLHrZ~%qCaKHu>01P;E8{YlO z?cOgOKzu*27w7=C0-J!1z&c z9l%E5K429<<(I-=2+Rf=foVV~zyXDT4HyLMUxxYsxvy)jq@UnScV9J5d}i{x;Q8)0 z7OjK~)<6bpy^MZPl76m{K;uZfrhg!Qm#pfhNpb5Om%`+1`1dw5WPXl@M`!~tp+9~x zE=##9P(F(CeS!K$eOe5DvO%BvPVHpM07?Jd4y)RJNZ`AijXRrrmpjN6@Gd^eZ{>IL z=NHvGLXJxvm9Bwqms@e4>wd!BC_F2?AvB9~#GAxB#r5J7;&ySD_=fnQ_?h^9$svuE zY9zmOmGr2zMS4;CL^@0UiM&u=CEqQtmp8~8<#zf1e3X=tCecww58hP z+Oyir+CJ@l?PG1A=LFB`9>1sF6ZPEXx!3csXQKz5vzhy=5j@A=%pc}|ZGX&ucTq>t zD95jzNmrSBmV2RloBM$KYT+?qpK!dyNwV})=^k*OC%-L!B!4ce;B&H4qbyPGQ+6tE zD#KJwU95(*gtkyyqkX1{o?_2=Xcuj9^bR6pRR@WNWHdlx1J=e!BoBQ|fm)vi;M+@bGPnaphg;wDz^n0`Ls_;MJkHuzjwYXJ$ zMSM*Z6pu1pnXmjpc}V%YlBJ%iUZggl*LSI#p@E^=XzfDnGHt%LR(nj_sU6hHJz>u^ zo;y7|J@iE;FAX&AI8Nlw;TpIoH=A3}ZR9p{S^Tm5sr(q;$Dhx)@z?T;`Q`j}eiz@# zf5d;sewzIZyVrh>eTF?@zu11c{YLwv_P^UF7KMvuJLWo`bG+af?kaN4bR}I6yPkAC z=i2Lf%e6+>AhZil3(pBJ2&z~jP8Y8dcY^=-#DijiG(?i5VyRN9lBP?#6qXXw9O-K5 zI_W0qR_Qm=z0yYMX=$spOFAqaFBi#ixmCVH{*8RE{1kMPqs)O--lhCmc}FQyMRk%| ztzM|sszEiP&QfQqSE#>G7pY6t73w|e1L`B{E*%#;bvilutvCFctm(ocm~?ugPG_Z z;h^wOArE@L2)5fQx5-z^JC#?Iead9DMyp|NGs zDruecAS`whEVe`1EA5vKNQb0PrCfQCTp$mXhs&tE9N)Qww}4;BcZj>i!IB_NgVnc4 zOQj8{*(ovECVS->kkmqXt^A<8L;gS>q~t3cX5lC-VG-uI9m)qvzRKDeqhEskTsQyx?{U`A|LHYuB7l{;XUyJ4A~%6{c-IwtPWLe>Tp=@NR?9^SV5MmYt;>yt9D}~=4pepe62tmjIn6LXe`u5 
zVmvxDp=%xSX$vryFVdD^MOmgT*H&R(Tc>T%HfrtKCT+8}RokI;XuGw&S|@4hs5>fw SqY^kOfuj;QDuMrK3H(1XaR1u? literal 0 HcmV?d00001 diff --git a/reverse_engineering/node_modules/streamsearch/LICENSE b/reverse_engineering/node_modules/streamsearch/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/reverse_engineering/node_modules/streamsearch/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/reverse_engineering/node_modules/streamsearch/README.md b/reverse_engineering/node_modules/streamsearch/README.md new file mode 100644 index 0000000..6310c20 --- /dev/null +++ b/reverse_engineering/node_modules/streamsearch/README.md @@ -0,0 +1,87 @@ +Description +=========== + +streamsearch is a module for [node.js](http://nodejs.org/) that allows searching a stream using the Boyer-Moore-Horspool algorithm. 
+ +This module is based heavily on the Streaming Boyer-Moore-Horspool C++ implementation by Hongli Lai [here](https://github.com/FooBarWidget/boyer-moore-horspool). + + +Requirements +============ + +* [node.js](http://nodejs.org/) -- v0.8.0 or newer + + +Installation +============ + + npm install streamsearch + +Example +======= + +```javascript + var StreamSearch = require('streamsearch'), + inspect = require('util').inspect; + + var needle = new Buffer([13, 10]), // CRLF + s = new StreamSearch(needle), + chunks = [ + new Buffer('foo'), + new Buffer(' bar'), + new Buffer('\r'), + new Buffer('\n'), + new Buffer('baz, hello\r'), + new Buffer('\n world.'), + new Buffer('\r\n Node.JS rules!!\r\n\r\n') + ]; + s.on('info', function(isMatch, data, start, end) { + if (data) + console.log('data: ' + inspect(data.toString('ascii', start, end))); + if (isMatch) + console.log('match!'); + }); + for (var i = 0, len = chunks.length; i < len; ++i) + s.push(chunks[i]); + + // output: + // + // data: 'foo' + // data: ' bar' + // match! + // data: 'baz, hello' + // match! + // data: ' world.' + // match! + // data: ' Node.JS rules!!' + // match! + // data: '' + // match! +``` + + +API +=== + +Events +------ + +* **info**(< _boolean_ >isMatch[, < _Buffer_ >chunk, < _integer_ >start, < _integer_ >end]) - A match _may_ or _may not_ have been made. In either case, a preceding `chunk` of data _may_ be available that did not match the needle. Data (if available) is in `chunk` between `start` (inclusive) and `end` (exclusive). + + +Properties +---------- + +* **maxMatches** - < _integer_ > - The maximum number of matches. Defaults to Infinity. + +* **matches** - < _integer_ > - The current match count. + + +Functions +--------- + +* **(constructor)**(< _mixed_ >needle) - Creates and returns a new instance for searching for a _Buffer_ or _string_ `needle`. + +* **push**(< _Buffer_ >chunk) - _integer_ - Processes `chunk`. The return value is the last processed index in `chunk` + 1. 
+ +* **reset**() - _(void)_ - Resets internal state. Useful for when you wish to start searching a new/different stream for example. diff --git a/reverse_engineering/node_modules/streamsearch/lib/sbmh.js b/reverse_engineering/node_modules/streamsearch/lib/sbmh.js new file mode 100644 index 0000000..dbefbc1 --- /dev/null +++ b/reverse_engineering/node_modules/streamsearch/lib/sbmh.js @@ -0,0 +1,213 @@ +/* + Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool +*/ +var EventEmitter = require('events').EventEmitter, + inherits = require('util').inherits; + +function jsmemcmp(buf1, pos1, buf2, pos2, num) { + for (var i = 0; i < num; ++i, ++pos1, ++pos2) + if (buf1[pos1] !== buf2[pos2]) + return false; + return true; +} + +function SBMH(needle) { + if (typeof needle === 'string') + needle = new Buffer(needle); + var i, j, needle_len = needle.length; + + this.maxMatches = Infinity; + this.matches = 0; + + this._occ = new Array(256); + this._lookbehind_size = 0; + this._needle = needle; + this._bufpos = 0; + + this._lookbehind = new Buffer(needle_len); + + // Initialize occurrence table. + for (j = 0; j < 256; ++j) + this._occ[j] = needle_len; + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
+ if (needle_len >= 1) { + for (i = 0; i < needle_len - 1; ++i) + this._occ[needle[i]] = needle_len - 1 - i; + } +} +inherits(SBMH, EventEmitter); + +SBMH.prototype.reset = function() { + this._lookbehind_size = 0; + this.matches = 0; + this._bufpos = 0; +}; + +SBMH.prototype.push = function(chunk, pos) { + var r, chlen; + if (!Buffer.isBuffer(chunk)) + chunk = new Buffer(chunk, 'binary'); + chlen = chunk.length; + this._bufpos = pos || 0; + while (r !== chlen && this.matches < this.maxMatches) + r = this._sbmh_feed(chunk); + return r; +}; + +SBMH.prototype._sbmh_feed = function(data) { + var len = data.length, needle = this._needle, needle_len = needle.length; + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + var pos = -this._lookbehind_size, + last_needle_char = needle[needle_len - 1], + occ = this._occ, + lookbehind = this._lookbehind; + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needle_len) { + var ch = this._sbmh_lookup_char(data, pos + needle_len - 1); + + if (ch === last_needle_char + && this._sbmh_memcmp(data, pos, needle_len - 1)) { + this._lookbehind_size = 0; + ++this.matches; + if (pos > -this._lookbehind_size) + this.emit('info', true, lookbehind, 0, this._lookbehind_size + pos); + else + this.emit('info', true); + + this._bufpos = pos + needle_len; + return pos + needle_len; + } else + pos += occ[ch]; + } + + // No match. 
+ + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) + pos++; + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, lookbehind, 0, this._lookbehind_size); + this._lookbehind_size = 0; + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + var bytesToCutOff = this._lookbehind_size + pos; + + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, lookbehind, 0, bytesToCutOff); + } + + lookbehind.copy(lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff); + this._lookbehind_size -= bytesToCutOff; + + data.copy(lookbehind, this._lookbehind_size); + this._lookbehind_size += len; + + this._bufpos = len; + return len; + } + } + + if (pos >= 0) + pos += this._bufpos; + + // Lookbehind buffer is now empty. Perform Boyer-Moore-Horspool + // search with optimized character lookup code that only considers + // the current round's haystack data. + while (pos <= len - needle_len) { + var ch = data[pos + needle_len - 1]; + + if (ch === last_needle_char + && data[pos] === needle[0] + && jsmemcmp(needle, 0, data, pos, needle_len - 1)) { + ++this.matches; + if (pos > 0) + this.emit('info', true, data, this._bufpos, pos); + else + this.emit('info', true); + + this._bufpos = pos + needle_len; + return pos + needle_len; + } else + pos += occ[ch]; + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + if (pos < len) { + while (pos < len && (data[pos] !== needle[0] + || !jsmemcmp(data, pos, needle, 0, len - pos))) { + ++pos; + } + if (pos < len) { + data.copy(lookbehind, 0, pos, pos + (len - pos)); + this._lookbehind_size = len - pos; + } + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) + this.emit('info', false, data, this._bufpos, pos < len ? pos : len); + + this._bufpos = len; + return len; +}; + +SBMH.prototype._sbmh_lookup_char = function(data, pos) { + if (pos < 0) + return this._lookbehind[this._lookbehind_size + pos]; + else + return data[pos]; +} + +SBMH.prototype._sbmh_memcmp = function(data, pos, len) { + var i = 0; + + while (i < len) { + if (this._sbmh_lookup_char(data, pos + i) === this._needle[i]) + ++i; + else + return false; + } + return true; +} + +module.exports = SBMH; diff --git a/reverse_engineering/node_modules/streamsearch/package.json b/reverse_engineering/node_modules/streamsearch/package.json new file mode 100644 index 0000000..9dde543 --- /dev/null +++ b/reverse_engineering/node_modules/streamsearch/package.json @@ -0,0 +1,62 @@ +{ + "_args": [ + [ + "streamsearch@0.1.2", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "streamsearch@0.1.2", + "_id": "streamsearch@0.1.2", + "_inBundle": false, + "_integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=", + "_location": "/streamsearch", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "streamsearch@0.1.2", + "name": "streamsearch", + "escapedName": "streamsearch", + "rawSpec": "0.1.2", + "saveSpec": 
null, + "fetchSpec": "0.1.2" + }, + "_requiredBy": [ + "/ssh2-streams" + ], + "_resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", + "_spec": "0.1.2", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "Brian White", + "email": "mscdex@mscdex.net" + }, + "bugs": { + "url": "https://github.com/mscdex/streamsearch/issues" + }, + "description": "Streaming Boyer-Moore-Horspool searching for node.js", + "engines": { + "node": ">=0.8.0" + }, + "homepage": "https://github.com/mscdex/streamsearch#readme", + "keywords": [ + "stream", + "horspool", + "boyer-moore-horspool", + "boyer-moore", + "search" + ], + "licenses": [ + { + "type": "MIT", + "url": "http://github.com/mscdex/streamsearch/raw/master/LICENSE" + } + ], + "main": "./lib/sbmh", + "name": "streamsearch", + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/mscdex/streamsearch.git" + }, + "version": "0.1.2" +} diff --git a/reverse_engineering/node_modules/string_decoder/LICENSE b/reverse_engineering/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/reverse_engineering/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+""" + diff --git a/reverse_engineering/node_modules/string_decoder/README.md b/reverse_engineering/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/reverse_engineering/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. 
+* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/reverse_engineering/node_modules/string_decoder/lib/string_decoder.js b/reverse_engineering/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/reverse_engineering/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
+exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. 
The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. 
+function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/string_decoder/package.json b/reverse_engineering/node_modules/string_decoder/package.json new file mode 100644 index 0000000..4e6db1b --- /dev/null +++ b/reverse_engineering/node_modules/string_decoder/package.json @@ -0,0 +1,62 @@ +{ + "_from": "string_decoder@^1.1.1", + "_id": "string_decoder@1.3.0", + "_inBundle": false, + "_integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "_location": "/string_decoder", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "string_decoder@^1.1.1", + "name": "string_decoder", + "escapedName": "string_decoder", + "rawSpec": "^1.1.1", + "saveSpec": null, + "fetchSpec": "^1.1.1" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "_shasum": "42f114594a46cf1a8e30b0a84f56c78c3edac21e", + "_spec": "string_decoder@^1.1.1", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/readable-stream", + "bugs": { + "url": "https://github.com/nodejs/string_decoder/issues" + }, + "bundleDependencies": false, + "dependencies": { + "safe-buffer": "~5.2.0" + }, + "deprecated": false, + "description": "The string_decoder module from Node core", + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "files": [ + "lib" + ], + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT", + "main": "lib/string_decoder.js", + "name": "string_decoder", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "scripts": { + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "test": 
"tap test/parallel/*.js && node test/verify-dependencies" + }, + "version": "1.3.0" +} diff --git a/reverse_engineering/node_modules/tunnel-ssh/.eslintignore b/reverse_engineering/node_modules/tunnel-ssh/.eslintignore new file mode 100644 index 0000000..1616ee7 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/.eslintignore @@ -0,0 +1,4 @@ +node_modules/** +.idea/** +build/** +examples diff --git a/reverse_engineering/node_modules/tunnel-ssh/README.md b/reverse_engineering/node_modules/tunnel-ssh/README.md new file mode 100644 index 0000000..f5df064 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/README.md @@ -0,0 +1,139 @@ +Tunnel-SSH +========== + +One to connect them all ! + +![Tunnel-SSH Logo](http://i.imgur.com/I5PRnDD.jpg) + +Tunnel-ssh is based on the fantastic [ssh2](https://github.com/mscdex/ssh2) library by Brian White. +Trouble ? Please study the ssh2 configuration. + +### Latest Relese 4.1.3 + +## Release notes +* Closing sshconnections correctly thx @actionshrimp +* Improved readme +* Updated modules + +Special thanks to +@vweevers and @dickeyxxx + + +### Related projects +* [If you don't want to wrap a tunnel around your code: inject-tunnel-ssh](https://github.com/agebrock/inject-tunnel-ssh) +* [If you need it the other way around: reverse-tunnel-ssh](https://github.com/agebrock/reverse-tunnel-ssh) + +### Integration +By default tunnel-ssh will close the tunnel after a client disconnects, so your cli tools should work in the same way, they do if you connect directly. +If you need the tunnel to stay open, use the "keepAlive:true" option within +the configuration. + + +```js + + var config = { + ... + keepAlive:true + }; + + var tnl = tunnel(config, function(error, tnl){ + yourClient.connect(); + yourClient.disconnect(); + setTimeout(function(){ + // you only need to close the tunnel by yourself if you set the + // keepAlive:true option in the configuration ! 
+ tnl.close(); + },2000); + }); + + // you can also close the tunnel from here... + setTimeout(function(){ + tnl.close(); + },2000); + +``` + + +## Understanding the configuration + +1. A local server listening for connections to forward via ssh +Description: This is where you bind your interface. +Properties: +** localHost (default is '127.0.0.1') +** localPort (default is dstPort) + + +2. The ssh configuration +Description: The host you want to use as ssh-tunnel server. +Properties: +** host +** port (22) +** username +** ... + + +3. The destination host configuration (based on the ssh host) +Imagine you just connected to The host you want to connect to. (via host:port) +now that server connects requires a target to tunnel to. +Properties: +** dstHost (localhost) +** dstPort + + +### Config example + +```js + + var config = { + username:'root', + Password:'secret', + host:sshServer, + port:22, + dstHost:destinationServer, + dstPort:27017, + localHost:'127.0.0.1', + localPort: 27000 + }; + + var tunnel = require('tunnel-ssh'); + tunnel(config, function (error, server) { + //.... + }); +``` +#### Sugar configuration + +tunnel-ssh assumes that you want to map the same port on a remote machine to your localhost using the ssh-server on the remote machine. + + +```js + + var config = { + username:'root', + dstHost:'remotehost.with.sshserver.com', + dstPort:27017, + privateKey:require(fs).readFileSync('/path/to/key'), + passphrase:'secret' + }; + +``` + +#### More configuration options +tunnel-ssh pipes the configuration direct into the ssh2 library so every config option provided by ssh2 still works. +[ssh2 configuration](https://github.com/mscdex/ssh2#client-methods) + + +#### catching errors: +```js + var tunnel = require('tunnel-ssh'); + //map port from remote 3306 to localhost 3306 + var server = tunnel({host: '172.16.0.8', dstPort: 3306}, function (error, server) { + if(error){ + //catch configuration and startup errors here. 
+ } + }); + + // Use a listener to handle errors outside the callback + server.on('error', function(err){ + console.error('Something bad happened:', err); + }); +``` diff --git a/reverse_engineering/node_modules/tunnel-ssh/examples/default.js b/reverse_engineering/node_modules/tunnel-ssh/examples/default.js new file mode 100644 index 0000000..c54e4e3 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/examples/default.js @@ -0,0 +1,26 @@ +var tunnel = require('../'); +var helper = require('./server'); + + +/** +make sure you can connect to your own machine with the current user without password. +Example: ssh $USER@127.0.0.1 + +Remember to add your privateKey to your ssh-agent (ssh-add) +**/ + +var config = { + host: '127.0.0.1', username: process.env.USER, dstPort: 8000, localPort: 7000 +}; + +var fakeServer = helper.createServer(config.dstPort, '127.0.0.1', function () { + tunnel(config, function () { + console.log('Tunnel open'); + helper.createClient(7000, '127.0.0.1', console.log); + helper.createClient(7000, '127.0.0.1', console.log); + }).on('error', function (e) { + console.log('error', e); + }); +}); + +fakeServer.unref(); diff --git a/reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive.js b/reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive.js new file mode 100644 index 0000000..dbc4fe7 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive.js @@ -0,0 +1,35 @@ +var tunnel = require('../'); +var helper = require('./server'); + +// Keep alive example +// this example demonstrates the keepAlive option. +// keepAlive will reuse the connections +// note the "tunnelKeepAlive.close();" at the end. +// this step is required to finish execution nicely + +var configA = { + host: '127.0.0.1', username: process.env.USER, dstPort: 8000, localPort: 7000, // Use keepAlive:true to keep the tunnel open. 
+ keepAlive: true +}; + +var fakeServer = helper.createServer(configA.dstPort, '127.0.0.1', function () { + var tunnelKeepAlive = tunnel(configA, function () { + console.log('Tunnel open'); + helper.createClient(7000, '127.0.0.1', console.log); + helper.createClient(7000, '127.0.0.1', console.log); + helper.createClient(7000, '127.0.0.1', console.log).on('close', function () { + helper.createClient(7000, '127.0.0.1', console.log).on('close', function () { + helper.createClient(7000, '127.0.0.1', console.log).on('close', function () { + setTimeout(function () { + // Call tunnel.close() to shutdown the server. + console.log('TRYING TO CLOSE'); + tunnelKeepAlive.close(); + }, 2000); + }); + }); + }); + }).on('error', function (e) { + console.log('error', e); + }); +}); +fakeServer.unref(); diff --git a/reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive_error.js b/reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive_error.js new file mode 100644 index 0000000..329551e --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/examples/keepAlive_error.js @@ -0,0 +1,35 @@ +var tunnel = require('../'); +var helper = require('./server'); + +// Keep alive example +// this example demonstrates the keepAlive option. +// keepAlive will reuse the connections +// note the "tunnelKeepAlive.close();" at the end. +// this step is required to finish execution nicely + +var configA = { + host: '127.0.0.1', + username: process.env.USER, + dstPort: 8000, + localPort: 7000, // Use keepAlive:true to keep the tunnel open. 
+ keepAlive: true +}; +var tunnelKeepAlive = tunnel(configA, function () { + console.log('Tunnel open'); + helper.createClient(7000, '127.0.0.1', console.log); + helper.createClient(7000, '127.0.0.1', console.log); + helper.createClient(7000, '127.0.0.1', console.log).on('close', function () { + helper.createClient(7000, '127.0.0.1', console.log).on('close', function () { + helper.createClient(7000, '127.0.0.1', console.log).on('close', function () { + setTimeout(function () { + // Call tunnel.close() to shutdown the server. + console.log('TRYING TO CLOSE'); + tunnelKeepAlive.close(); + }, 2000); + }); + }); + }); +}).on('error', function (e) { + console.log('error', e); +}); + diff --git a/reverse_engineering/node_modules/tunnel-ssh/examples/loginError.js b/reverse_engineering/node_modules/tunnel-ssh/examples/loginError.js new file mode 100644 index 0000000..e5ffb41 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/examples/loginError.js @@ -0,0 +1,30 @@ +var tunnel = require('../'); +var helper = require('./server'); + + +/** +make sure you can connect to your own machine with the current user without password. 
+Example: ssh $USER@127.0.0.1 + +Remember to add your privateKey to your ssh-agent (ssh-add) +**/ + +var config = { + host: '127.0.0.1', username: 'foo', dstPort: 8000, localPort: 7000 +}; + +var fakeServer = helper.createServer(config.dstPort, '127.0.0.1', function () { + var srv = tunnel(config, function (server, error) { + server.sshConnection.on('error',(e) => console.log("first",e.message)) + console.log('Tunnel open', error); + helper.createClient(7000, '127.0.0.1', console.log); + helper.createClient(7000, '127.0.0.1', console.log); + }).on('error', function (e) { + console.log('error', e); + }); + +// srv.sshConnection.on('error',(e) => console.log(e.message)) + +}); + +fakeServer.unref(); diff --git a/reverse_engineering/node_modules/tunnel-ssh/examples/server/index.js b/reverse_engineering/node_modules/tunnel-ssh/examples/server/index.js new file mode 100644 index 0000000..6b44457 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/examples/server/index.js @@ -0,0 +1,36 @@ +var net = require('net'); +var debug = require('debug')('tunnel-ssh:test-server-client'); + +function createServer(port, addr, callback) { + var handleConnection = function (socket) { + socket.on('data', function (data) { + debug('server::data', data); + }); + debug('server::write'); + socket.write('Echo server\r\n'); + }; + + return net.createServer(handleConnection).listen(port, addr, callback); +} + +function createClient(port, addr, callback) { + var client = new net.Socket(); + + client.on('error', function (e) { + console.log('errortest', e); + }); + + client.connect(port, addr, function () { + debug('client::write'); + client.write('alive !'); + setTimeout(function () { + client.end(); + debug('client::end'); + callback(null, true); + }, 300); + }); + return client; +} + +exports.createServer = createServer; +exports.createClient = createClient; diff --git a/reverse_engineering/node_modules/tunnel-ssh/index.js b/reverse_engineering/node_modules/tunnel-ssh/index.js 
new file mode 100644 index 0000000..b3744e5 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/index.js @@ -0,0 +1,101 @@ +var net = require('net'); +var debug = require('debug')('tunnel-ssh'); +var Connection = require('ssh2'); +var createConfig = require('./lib/config'); +var events = require('events'); +var noop = function () { +}; + +function bindSSHConnection(config, netConnection) { + var sshConnection = new Connection(); + netConnection.on('close', sshConnection.end.bind(sshConnection)); + + sshConnection.on('ready', function () { + debug('sshConnection:ready'); + netConnection.emit('sshConnection', sshConnection, netConnection); + sshConnection.forwardOut(config.srcHost, config.srcPort, config.dstHost, config.dstPort, function (err, sshStream) { + if (err) { + // Bubble up the error => netConnection => server + netConnection.emit('error', err); + debug('Destination port:', err); + return; + } + + debug('sshStream:create'); + netConnection.emit('sshStream', sshStream); + netConnection.pipe(sshStream).pipe(netConnection); + }); + }); + return sshConnection; +} + +function createServer(config) { + var server; + var connections = []; + var connectionCount = 0; + + server = net.createServer(function (netConnection) { + var sshConnection; + connectionCount++; + netConnection.on('error', server.emit.bind(server, 'error')); + netConnection.on('close', function () { + connectionCount--; + if (connectionCount === 0) { + if (!config.keepAlive) { + setTimeout(function () { + if (connectionCount === 0) { + server.close(); + } + }, 2); + } + } + }); + + server.emit('netConnection', netConnection, server); + sshConnection = bindSSHConnection(config, netConnection); + sshConnection.on('error', server.emit.bind(server, 'error')); + + netConnection.on('sshStream', function (sshStream) { + sshStream.on('error', function () { + server.close(); + }); + }); + + connections.push(sshConnection, netConnection); + sshConnection.connect(config); + }); + + 
server.on('close', function () { + connections.forEach(function (connection) { + connection.end(); + }); + }); + + return server; +} + +function tunnel(configArgs, callback) { + var server; + var config; + + if (!callback) { + callback = noop; + } + try { + config = createConfig(configArgs); + server = createServer(config); + + server.listen(config.localPort, config.localHost, function (error) { + callback(error, server); + }); + } catch (e) { + server = new events.EventEmitter(); + setImmediate(function () { + callback(e); + server.emit('error', e); + }); + } + return server; +} + +module.exports = tunnel; diff --git a/reverse_engineering/node_modules/tunnel-ssh/lib/config.js b/reverse_engineering/node_modules/tunnel-ssh/lib/config.js new file mode 100644 index 0000000..99b1996 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/lib/config.js @@ -0,0 +1,53 @@ +var util = require('util'); +var defaults = require('lodash.defaults'); +var debug = require('debug')('tunnel-ssh-config'); + +var ConfigError = function (message, extra) { + Error.captureStackTrace(this, this.constructor); + this.name = this.constructor.name; + this.message = message; + this.extra = extra; +}; + +util.inherits(ConfigError, Error); + +function createConfig(config) { + var env = process.env; + + defaults(config || {}, { + username: env.TUNNELSSH_USER || env.USER || env.USERNAME || 'root', + port: 22, + host: null, + srcPort: 0, + srcHost: '127.0.0.1', + dstPort: null, + dstHost: '127.0.0.1', + localHost: '127.0.0.1', + localPort: config.dstPort, + agent: process.env.SSH_AUTH_SOCK + }); + + if (!config.host) { + throw new ConfigError('host not set'); + } + + if (!config.dstPort) { + throw new ConfigError('dstPort not set'); + } + debug('ssh-config', (function () { + var hiddenValues = ['password', 'privateKey']; + + return Object.keys(config).reduce(function (obj, key) { + if (hiddenValues.indexOf(key) === -1) { + obj[key] = config[key]; + } else { + obj[key] = '***HIDDEN***'; + } 
+ return obj; + }, {}); + })()); + + return config; +} + +module.exports = createConfig; diff --git a/reverse_engineering/node_modules/tunnel-ssh/package.json b/reverse_engineering/node_modules/tunnel-ssh/package.json new file mode 100644 index 0000000..adf4939 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/package.json @@ -0,0 +1,80 @@ +{ + "_args": [ + [ + "tunnel-ssh@4.1.4", + "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" + ] + ], + "_from": "tunnel-ssh@4.1.4", + "_id": "tunnel-ssh@4.1.4", + "_inBundle": false, + "_integrity": "sha512-CjBqboGvAbM7iXSX2F95kzoI+c2J81YkrHbyyo4SWNKCzU6w5LfEvXBCHu6PPriYaNvfhMKzD8bFf5Vl14YTtg==", + "_location": "/tunnel-ssh", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "tunnel-ssh@4.1.4", + "name": "tunnel-ssh", + "escapedName": "tunnel-ssh", + "rawSpec": "4.1.4", + "saveSpec": null, + "fetchSpec": "4.1.4" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/tunnel-ssh/-/tunnel-ssh-4.1.4.tgz", + "_spec": "4.1.4", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", + "author": { + "name": "Christoph Hagenbrock", + "email": "christoph.hagenbrock@googlemail.com" + }, + "bugs": { + "url": "https://github.com/agebrock/tunnel-ssh/issues" + }, + "dependencies": { + "debug": "2.6.9", + "lodash.defaults": "^4.1.0", + "ssh2": "0.5.4" + }, + "description": "Easy extendable SSH tunnel", + "devDependencies": { + "chai": "3.5.0", + "eslint": "^3.2.2", + "eslint-config-xo": "^0.17.0", + "mocha": "^3.5.3" + }, + "eslintConfig": { + "extends": "xo", + "env": { + "mocha": true + }, + "rules": { + "indent": [ + "error", + 4 + ] + } + }, + "homepage": "https://github.com/agebrock/tunnel-ssh#readme", + "keywords": [ + "tunnel", + "ssh", + "mysql", + "develop", + "net" + ], + "license": "MIT", + "main": "index.js", + "name": "tunnel-ssh", + "repository": { + "type": "git", + "url": 
"git+https://github.com/agebrock/tunnel-ssh.git" + }, + "scripts": { + "test": "mocha && eslint ." + }, + "version": "4.1.4" +} diff --git a/reverse_engineering/node_modules/tunnel-ssh/rewrite.js b/reverse_engineering/node_modules/tunnel-ssh/rewrite.js new file mode 100644 index 0000000..f952634 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/rewrite.js @@ -0,0 +1,138 @@ +const events = require('events'); +const sshClient = require('ssh2'); +const net = require('net'); +const debug = require('debug')('tunnel-ssh'); +const createConfig = require('./lib/config'); + +class Tunnel extends events.EventEmitter { + constructor(config) { + super(); + this.config = config; + // Expose sshClient for external event-bindings + // @TODO exclude into separate util function + this.sshClient = sshClient(); + this.sshClient.promise = new Promise((resolve, reject) => { + this.sshClient + .on('ready', () => resolve(this.sshClient)) + .on('error', error => reject(error)) + .connect(config); + }); + } + + /** + * Creates a dublex stream + * @returns {Promise.} + */ + getStream(srcHost, srcPort, dstHost, dstPort) { + // @todo implement old behavior "create a new client for every connection" + return this.sshClient.promise.then(client => { + return new Promise((resolve, reject) => { + return client.forwardOut( + srcHost, + srcPort, + dstHost, + dstPort, (error, sshConnection) => { + if (error) { + this.emit('error', error); + return reject(error); + } + return resolve(sshConnection); + }); + }); + }); + } + + /** + * Creates a tcp server as entry point for the ssh tunnel, + * every incoming tcp connection is piped to the tunnel. 
+ * @returns {Promise.} + */ + listen(port, addr, srcHost, srcPort, dstHost, dstPort) { + let server = net.createServer(); + server.promise = new Promise((resolve, reject) => { + server.on('listening', () => resolve(server)); + server.on('error', error => reject(error)); + }); + + server.promise.catch(e => { + this.emit('error', e); + }); + + server.on('connection', tcpConnection => { + this.getStream(srcHost, srcPort, dstHost, dstPort).then(sshConnection => { + debug('sshConnection:create'); + this.emit('sshConnection', sshConnection); + if (this.config.exitOnLastConnectionEnd === true) { + tcpConnection.on('close', () => server.getConnections((error, count) => { + if (error) { + this.emit('error', error); + } + debug('ConnectionCount => ' + count); + if (count === 0) { + server.close(); + } + })); + } + tcpConnection.pipe(sshConnection).pipe(tcpConnection); + sshConnection.on('end', () => console.log('ssh-connection end')); + sshConnection.on('close', () => console.log('ssh-connection close')); + }).catch(error => this.emit('error', error)); + }); + + server.on('close', () => { + debug('server::close'); + this.sshClient.end(); + }); + + return this.sshClient.promise.then(() => { + server.listen(port, addr); + return server; + }).catch(error => this.emit('error', error)); + } +} + +function tunnel(rawConfig) { + let config = createConfig(rawConfig); + + let tnl = new Tunnel(config); + tnl.listen( + config.bindPort, + config.bindAddr, + config.srcHost, + config.srcPort, + config.dstHost, + config.dstPort + ).then(server => { + server.on('listening', function () { + console.log('listening'); + }); + }).catch(error => this.emit('error', error)); + + Promise.all(config.ports.map(port => { + return tnl.listen( + port, + config.bindAddr, + config.srcHost, + config.srcPort, + config.dstHost, + port + ); + })).then(() => console.log('done')).catch(error => console.log(error)) + + return tnl; +} +/* + var http = require('http'); + var s = http.createServer(function (req, 
res) { + res.setHeader('Content-Type', 'text/html'); + res.setHeader('X-Foo', 'bar'); + res.writeHead(200, {'Content-Type': 'text/plain'}); + res.write('foo'); + res.end('ok'); + }); + s.listen(7000); + s.unref(); + */ +tunnel({ + host: 'pi', user: 'pi', passphrase: '*gold12', dstPort: 8080, bindPort: 8082, ports: [8081] +}).on('error', error => console.log(error)); diff --git a/reverse_engineering/node_modules/tunnel-ssh/test/config-spec.js b/reverse_engineering/node_modules/tunnel-ssh/test/config-spec.js new file mode 100644 index 0000000..24229da --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/test/config-spec.js @@ -0,0 +1,56 @@ +var chai = require('chai'); +var expect = chai.expect; +var createConfig = require('../lib/config'); + +describe('config', function () { + it('use dstPort as localPort', function () { + var config = { + host: 'test.host', dstPort: 8000 + }; + expect(createConfig(config).localPort).to.be.equal(8000); + }); + + it('should emit an error', function () { + var config = { + host: 'remoteHost' + }; + expect(createConfig.bind(null, config)).to.throw('dstPort not set'); + }); + + it('throws an error if host is missing', function () { + var config = { + dstPort: 8000 + }; + expect(createConfig.bind(null, config)).to.throw('host not set'); + }); +}); + +/* + // Keep alive + var configA = { + host: '127.0.0.1', + username: process.env.USER, + dstPort: 8000, + localPort: 7000, + // Use keepAlive:true to keep the tunnel open. + keepAlive: true + }; + var tunnelKeepAlive = tunnel(configA, function() { + console.log('Tunnel open'); + helper.createClient(7000, '127.0.0.1', console.log).on('close', function() { + helper.createClient(7000, '127.0.0.1', console.log).on('close', function() { + helper.createClient(7000, '127.0.0.1', console.log).on('close', function() { + setTimeout(function() { + // Call tunnel.close() to shutdown the server. 
+ console.log('TRYING TO CLOSE'); + tunnelKeepAlive.close(); + }, 2000); + }); + }); + }); + }).on('error', function(e) { + console.log('error', e); + }); + }); + + */ diff --git a/reverse_engineering/node_modules/tunnel-ssh/test/main-spec.js b/reverse_engineering/node_modules/tunnel-ssh/test/main-spec.js new file mode 100644 index 0000000..c95d2fa --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/test/main-spec.js @@ -0,0 +1,37 @@ +var tunnel = require('../'); +var helper = require('./server'); +var chai = require('chai'); +var expect = chai.expect; + +describe('tunnel-ssh', function () { + it('should emit an error', function (done) { + var config = { + host: '127.0.0.1', username: process.env.USER, dstPort: 8000, localPort: 7000 + }; + + tunnel(config, function () { + helper.createClient(7000, '127.0.0.1', done); + }).on('error', function (e) { + expect(e).to.be.instanceOf(Error); + done(null); + }); + }); + + it('brokenConfig, should callback an error', function (done) { + var brokenConfig = {}; + + tunnel(brokenConfig, function (e) { + expect(e).to.be.instanceOf(Error); + done(); + }); + }); + + it('brokenConfig, should emit an error', function (done) { + var brokenConfig = {}; + + tunnel(brokenConfig).on('error', function (e) { + expect(e).to.be.instanceOf(Error); + done(null); + }); + }); +}); diff --git a/reverse_engineering/node_modules/tunnel-ssh/test/server/index.js b/reverse_engineering/node_modules/tunnel-ssh/test/server/index.js new file mode 100644 index 0000000..2988288 --- /dev/null +++ b/reverse_engineering/node_modules/tunnel-ssh/test/server/index.js @@ -0,0 +1,36 @@ +var net = require('net'); +var debug = require('debug')('tunnel-ssh:test-server-client'); + +function createServer(port, addr, callback) { + var handleConnection = function (socket) { + socket.on('data', function (data) { + debug('server::data', data); + }); + debug('server::write'); + socket.write('Echo server\r\n'); + }; + + return 
net.createServer(handleConnection).listen(port, addr, callback); +} + +function createClient(port, addr, callback) { + var client = new net.Socket(); + + client.on('error', function (e) { + debug('clientError', e); + }); + + client.connect(port, addr, function () { + debug('client::write'); + client.write('alive !'); + setTimeout(function () { + client.end(); + debug('client::end'); + callback(null, true); + }, 300); + }); + return client; +} + +exports.createServer = createServer; +exports.createClient = createClient; diff --git a/reverse_engineering/node_modules/util-deprecate/History.md b/reverse_engineering/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/reverse_engineering/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/reverse_engineering/node_modules/util-deprecate/LICENSE b/reverse_engineering/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/reverse_engineering/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/reverse_engineering/node_modules/util-deprecate/README.md b/reverse_engineering/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/reverse_engineering/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/reverse_engineering/node_modules/util-deprecate/browser.js b/reverse_engineering/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/reverse_engineering/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. 
+ * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/reverse_engineering/node_modules/util-deprecate/node.js b/reverse_engineering/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/reverse_engineering/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/reverse_engineering/node_modules/util-deprecate/package.json b/reverse_engineering/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..210abf3 --- /dev/null +++ b/reverse_engineering/node_modules/util-deprecate/package.json @@ -0,0 +1,56 @@ +{ + "_from": "util-deprecate@^1.0.1", + "_id": "util-deprecate@1.0.2", + "_inBundle": false, + "_integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "_location": "/util-deprecate", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "util-deprecate@^1.0.1", + "name": "util-deprecate", + "escapedName": "util-deprecate", + "rawSpec": "^1.0.1", + "saveSpec": null, + "fetchSpec": "^1.0.1" + }, + "_requiredBy": [ + "/readable-stream" + ], + "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", + "_spec": "util-deprecate@^1.0.1", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/readable-stream", + "author": { + "name": "Nathan Rajlich", + "email": "nathan@tootallnate.net", + "url": "http://n8.io/" + }, + "browser": "browser.js", + "bugs": { + "url": 
"https://github.com/TooTallNate/util-deprecate/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The Node.js `util.deprecate()` function with browser support", + "homepage": "https://github.com/TooTallNate/util-deprecate", + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "license": "MIT", + "main": "node.js", + "name": "util-deprecate", + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "version": "1.0.2" +} diff --git a/reverse_engineering/node_modules/xtend/.jshintrc b/reverse_engineering/node_modules/xtend/.jshintrc new file mode 100644 index 0000000..77887b5 --- /dev/null +++ b/reverse_engineering/node_modules/xtend/.jshintrc @@ -0,0 +1,30 @@ +{ + "maxdepth": 4, + "maxstatements": 200, + "maxcomplexity": 12, + "maxlen": 80, + "maxparams": 5, + + "curly": true, + "eqeqeq": true, + "immed": true, + "latedef": false, + "noarg": true, + "noempty": true, + "nonew": true, + "undef": true, + "unused": "vars", + "trailing": true, + + "quotmark": true, + "expr": true, + "asi": true, + + "browser": false, + "esnext": true, + "devel": false, + "node": false, + "nonstandard": false, + + "predef": ["require", "module", "__dirname", "__filename"] +} diff --git a/reverse_engineering/node_modules/xtend/LICENSE b/reverse_engineering/node_modules/xtend/LICENSE new file mode 100644 index 0000000..0099f4f --- /dev/null +++ b/reverse_engineering/node_modules/xtend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) +Copyright (c) 2012-2014 Raynos. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/reverse_engineering/node_modules/xtend/README.md b/reverse_engineering/node_modules/xtend/README.md new file mode 100644 index 0000000..4a2703c --- /dev/null +++ b/reverse_engineering/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. + +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. 
Does not mutate arguments +var combination = extend({ + a: "a", + b: "c" +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licensed + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/reverse_engineering/node_modules/xtend/immutable.js b/reverse_engineering/node_modules/xtend/immutable.js new file mode 100644 index 0000000..94889c9 --- /dev/null +++ b/reverse_engineering/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/reverse_engineering/node_modules/xtend/mutable.js b/reverse_engineering/node_modules/xtend/mutable.js new file mode 100644 index 0000000..72debed --- /dev/null +++ b/reverse_engineering/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/reverse_engineering/node_modules/xtend/package.json b/reverse_engineering/node_modules/xtend/package.json new file mode 100644 index 0000000..2d4046d --- /dev/null +++ b/reverse_engineering/node_modules/xtend/package.json @@ -0,0 +1,86 @@ +{ + "_from": "xtend@^4.0.0", + "_id": "xtend@4.0.2", + "_inBundle": false, + "_integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "_location": "/xtend", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "xtend@^4.0.0", + "name": 
"xtend", + "escapedName": "xtend", + "rawSpec": "^4.0.0", + "saveSpec": null, + "fetchSpec": "^4.0.0" + }, + "_requiredBy": [ + "/postgres-interval" + ], + "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "_shasum": "bb72779f5fa465186b1f438f674fa347fdb5db54", + "_spec": "xtend@^4.0.0", + "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/postgres-interval", + "author": { + "name": "Raynos", + "email": "raynos2@gmail.com" + }, + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "extend like a boss", + "devDependencies": { + "tape": "~1.1.0" + }, + "engines": { + "node": ">=0.4" + }, + "homepage": "https://github.com/Raynos/xtend", + "keywords": [ + "extend", + "merge", + "options", + "opts", + "object", + "array" + ], + "license": "MIT", + "main": "immutable", + "name": "xtend", + "repository": { + "type": "git", + "url": "git://github.com/Raynos/xtend.git" + }, + "scripts": { + "test": "node test" + }, + "testling": { + "files": "test.js", + "browsers": [ + "ie/7..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest" + ] + }, + "version": "4.0.2" +} diff --git a/reverse_engineering/node_modules/xtend/test.js b/reverse_engineering/node_modules/xtend/test.js new file mode 100644 index 0000000..b895b42 --- /dev/null +++ b/reverse_engineering/node_modules/xtend/test.js @@ -0,0 +1,103 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + +test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + 
+test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("prototype pollution", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + extend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) + +test("prototype pollution mutable", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + mutableExtend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) diff --git 
a/reverse_engineering/package-lock.json b/reverse_engineering/package-lock.json index 054677f..ea1046c 100644 --- a/reverse_engineering/package-lock.json +++ b/reverse_engineering/package-lock.json @@ -4,16 +4,6 @@ "lockfileVersion": 1, "requires": true, "dependencies": { - "@types/geojson": { - "version": "7946.0.7", - "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.7.tgz", - "integrity": "sha512-wE2v81i4C4Ol09RtsWFAqg3BUitWbHSpSlIo+bNdsCJijO9sjme+zm+73ZMCa/qMC8UEERxzGbvmr1cffo2SiQ==" - }, - "@types/node": { - "version": "14.14.35", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.35.tgz", - "integrity": "sha512-Lt+wj8NVPx0zUmUwumiVXapmaLUcAk3yPuHCFVXras9k5VT9TdhJqKqGVUQCD60OTMCl0qxJ57OiTL0Mic3Iag==" - }, "asn1": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", @@ -22,6 +12,11 @@ "safer-buffer": "~2.1.0" } }, + "buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" + }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", @@ -30,69 +25,118 @@ "ms": "2.0.0" } }, - "denque": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.0.tgz", - "integrity": "sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ==" - }, - "iconv-lite": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.2.tgz", - "integrity": "sha512-2y91h5OpQlolefMPmUlivelittSWy0rP+oYVpn6A7GwVHNE8AWzoYOBNmlwks3LobaJxgHCYZAnyNo2GgpNRNQ==", - "requires": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - } + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": 
"sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "lodash.defaults": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" }, - "long": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz", - "integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==" + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" }, - "mariadb": { - "version": "2.5.3", - "resolved": "https://registry.npmjs.org/mariadb/-/mariadb-2.5.3.tgz", - "integrity": "sha512-9ZbQ1zLqasLCQy6KDcPHtX7EUIMBlQ8p64gNR61+yfpCIWjPDji3aR56LvwbOz1QnQbVgYBOJ4J/pHoFN5MR+w==", + "packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, + "pg": { + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz", + "integrity": "sha512-7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==", "requires": { - "@types/geojson": "^7946.0.7", - "@types/node": "^14.14.28", - "denque": "^1.4.1", - "iconv-lite": "^0.6.2", - "long": "^4.0.0", - "moment-timezone": "^0.5.33", - "please-upgrade-node": "^3.2.0" + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.4.1", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" } }, - "moment": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", - "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==" + "pg-connection-string": { + "version": "2.5.0", + 
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" }, - "moment-timezone": { - "version": "0.5.33", - "resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.33.tgz", - "integrity": "sha512-PTc2vcT8K9J5/9rDEPe5czSIKgLoGsH8UNpA4qZTVw0Vd/Uz19geE9abbIOQKaAQFcnQ3v5YEXrbSc5BpshH+w==", + "pg-pool": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz", + "integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==" + }, + "pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", "requires": { - "moment": ">= 2.9.0" + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" } }, - "ms": { + "pgpass": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz", + "integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==", + "requires": { + "split2": "^3.1.1" + } + }, + "postgres-array": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + 
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" + }, + "postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=" + }, + "postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==" + }, + "postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "requires": { + "xtend": "^4.0.0" + } }, - "please-upgrade-node": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz", - "integrity": "sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg==", + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "requires": { - "semver-compare": "^1.0.0" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" } }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -103,10 +147,13 @@ "resolved": 
"https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" }, - "semver-compare": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", - "integrity": "sha1-De4hahyUGrN+nvsXiPavxf9VN/w=" + "split2": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", + "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "requires": { + "readable-stream": "^3.0.0" + } }, "ssh2": { "version": "0.5.4", @@ -131,6 +178,14 @@ "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", "integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=" }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + }, "tunnel-ssh": { "version": "4.1.4", "resolved": "https://registry.npmjs.org/tunnel-ssh/-/tunnel-ssh-4.1.4.tgz", @@ -140,6 +195,16 @@ "lodash.defaults": "^4.1.0", "ssh2": "0.5.4" } + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" } } } diff --git a/reverse_engineering/package.json b/reverse_engineering/package.json index 74e86eb..91a0a68 100644 --- a/reverse_engineering/package.json +++ b/reverse_engineering/package.json @@ -4,7 +4,7 @@ "description": "", "author": "Hackolade", "dependencies": { - "mariadb": "^2.5.3", + "pg": 
"^8.7.1", "tunnel-ssh": "^4.1.4" }, "installed": true From 9fcc4ce35b2edf17a3eca6cd12be2089fb2561aa Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 21 Sep 2021 18:02:41 +0300 Subject: [PATCH 02/69] RE: added logic to show schemas with tables --- reverse_engineering/api.js | 113 ++++++++++++++---- .../helpers/connectionHelper.js | 100 ++++++++++++++++ reverse_engineering/helpers/db.js | 30 +++++ .../helpers/postgresService.js | 72 +++++++++++ reverse_engineering/helpers/queryConstants.js | 5 + 5 files changed, 295 insertions(+), 25 deletions(-) create mode 100644 reverse_engineering/helpers/connectionHelper.js create mode 100644 reverse_engineering/helpers/db.js create mode 100644 reverse_engineering/helpers/postgresService.js create mode 100644 reverse_engineering/helpers/queryConstants.js diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index e08ec6c..65fbe8f 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -1,38 +1,101 @@ 'use strict'; +const postgresService = require('./helpers/postgresService'); + module.exports = { - disconnect(connectionInfo, logger, callback, app) { + async disconnect(connectionInfo, logger, callback, app) { + await postgresService.disconnect(); + + callback(); + }, + + async testConnection(connectionInfo, logger, callback, app) { + try { + logger.clear(); + logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); + + const postgresLogger = createLogger({ + title: 'Test connection instance log', + hiddenKeys: connectionInfo.hiddenKeys, + logger, + }); + + await postgresService.connect(connectionInfo, postgresLogger); + await postgresService.pingDb(); + callback(); + } catch (error) { + callback(prepareError(error)); + } finally { + await postgresService.disconnect(); + } + }, - }, + async getDbCollectionsNames(connectionInfo, logger, callback, app) { + try { + logger.clear(); + logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); - 
async testConnection(connectionInfo, logger, callback, app) { + const postgresLogger = createLogger({ + title: 'Test connection log', + hiddenKeys: connectionInfo.hiddenKeys, + logger, + }); - }, + await postgresService.setDependencies(app); + await postgresService.connect(connectionInfo, postgresLogger); + const schemasNames = await postgresService.getAllSchemasNames(); - async getDbCollectionsNames(connectionInfo, logger, callback, app) { + const collections = await schemasNames.reduce(async (next, dbName) => { + const result = await next; + try { + const dbCollections = await postgresService.getTablesNames(dbName); - }, + return result.concat({ + dbName, + dbCollections, + isEmpty: dbCollections.length === 0, + }); + } catch (error) { + postgresLogger.info(`Error reading database "${dbName}"`); + postgresLogger.error(error); - async getDbCollectionsData(data, logger, callback, app) { - - }, + return result.concat({ + dbName, + dbCollections: [], + isEmpty: true, + status: true, + }); + } + }, Promise.resolve([])); + + callback(null, collections); + } catch (error) { + callback(prepareError(error)); + } finally { + await postgresService.disconnect(); + } + }, + + async getDbCollectionsData(data, logger, callback, app) {}, }; const createLogger = ({ title, logger, hiddenKeys }) => { - return { - info(message) { - logger.log('info', { message }, title, hiddenKeys); - }, - - progress(message, dbName = '', tableName = '') { - logger.progress({ message, containerName: dbName, entityName: tableName }); - }, - - error(error) { - logger.log('error', { - message: error.message, - stack: error.stack, - }, title); - } - }; + return { + info(message, additionalData = {}) { + logger.log('info', { message, ...additionalData }, title, hiddenKeys); + }, + + progress(message, dbName = '', tableName = '') { + logger.progress({ message, containerName: dbName, entityName: tableName }); + }, + + error(error) { + logger.log('error', prepareError(error), title); + }, + }; }; + +const 
prepareError = error => ({ + message: error.message, + stack: error.stack, +}); diff --git a/reverse_engineering/helpers/connectionHelper.js b/reverse_engineering/helpers/connectionHelper.js new file mode 100644 index 0000000..e3d6deb --- /dev/null +++ b/reverse_engineering/helpers/connectionHelper.js @@ -0,0 +1,100 @@ +const Pool = require('pg').Pool; +const fs = require('fs'); +const ssh = require('tunnel-ssh'); + +const getSshConfig = info => { + const config = { + username: info.ssh_user, + host: info.ssh_host, + port: info.ssh_port, + dstHost: info.host, + dstPort: info.port, + localHost: '127.0.0.1', + localPort: info.port, + keepAlive: true, + }; + + if (info.ssh_method === 'privateKey') { + return Object.assign({}, config, { + privateKey: fs.readFileSync(info.ssh_key_file), + passphrase: info.ssh_key_passphrase, + }); + } else { + return Object.assign({}, config, { + password: info.ssh_password, + }); + } +}; + +const connectViaSsh = info => + new Promise((resolve, reject) => { + ssh(getSshConfig(info), (err, tunnel) => { + if (err) { + reject(err); + } else { + resolve({ + tunnel, + info: Object.assign({}, info, { + host: '127.0.0.1', + }), + }); + } + }); + }); + +const getSslOptions = connectionInfo => { + if (connectionInfo.sslType === 'Off') { + return false; + } + + if (connectionInfo.sslType === 'Unvalidated') { + return { + rejectUnauthorized: false, + }; + } + + if (connectionInfo.sslType === 'TRUST_CUSTOM_CA_SIGNED_CERTIFICATES') { + return { + ca: fs.readFileSync(connectionInfo.certAuthority), + }; + } + + if (connectionInfo.sslType === 'TRUST_SERVER_CLIENT_CERTIFICATES') { + return { + ca: fs.readFileSync(connectionInfo.certAuthority), + cert: fs.readFileSync(connectionInfo.clientCert), + key: fs.readFileSync(connectionInfo.clientPrivateKey), + }; + } +}; + +const createConnectionPool = async connectionInfo => { + let sshTunnel = null; + + if (connectionInfo.ssh) { + const { info, tunnel } = await connectViaSsh(connectionInfo); + sshTunnel = 
tunnel; + connectionInfo = info; + } + + const config = { + host: connectionInfo.host, + user: connectionInfo.userName, + password: connectionInfo.userPassword, + port: connectionInfo.port, + keepAlive: true, + ssl: getSslOptions(connectionInfo), + connectionTimeoutMillis: Number(connectionInfo.queryRequestTimeout) || 60000, + query_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, + statement_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, + database: connectionInfo.databaseName, + }; + + const pool = await new Pool(config); + + return { pool, sshTunnel }; +}; + +module.exports = { + createConnectionPool, +}; diff --git a/reverse_engineering/helpers/db.js b/reverse_engineering/helpers/db.js new file mode 100644 index 0000000..36627a1 --- /dev/null +++ b/reverse_engineering/helpers/db.js @@ -0,0 +1,30 @@ +let pool = null; +let logger = null; + +module.exports = { + initializePool(newPool, newLogger) { + pool = newPool; + logger = newLogger; + + pool.on('error', error => newLogger.error(error)); + }, + + async releasePool() { + if (pool) { + await pool.end(); + pool = null; + } + }, + + async query(query, params) { + logger.info('Execute query', { query, params }); + + const start = Date.now(); + const result = await pool.query(query, params); + const duration = Date.now() - start; + + logger.info('Query executed', { query, params, duration, rowsCount: result.rowCount }); + + return result; + }, +}; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js new file mode 100644 index 0000000..fd38c0e --- /dev/null +++ b/reverse_engineering/helpers/postgresService.js @@ -0,0 +1,72 @@ +const { createConnectionPool } = require('./connectionHelper'); +const db = require('./db'); +const queryConstants = require('./queryConstants'); + +let currentSshTunnel = null; +let _ = null; + +module.exports = { + setDependencies(app) { + _ = app.require('lodash'); + }, + + async connect(connectionInfo, 
logger) { + const { pool, sshTunnel } = await createConnectionPool(connectionInfo); + + db.initializePool(pool, logger); + currentSshTunnel = sshTunnel; + }, + + async disconnect() { + if (currentSshTunnel) { + currentSshTunnel.close(); + currentSshTunnel = null; + } + + await db.releasePool(); + }, + + pingDb() { + return db.query(queryConstants.PING); + }, + + async getAllSchemasNames() { + const result = await db.query(queryConstants.GET_SCHEMA_NAMES); + + return result.rows + .map(({ schema_name }) => schema_name) + .filter(schemaName => !this._isSystemSchema(schemaName)); + }, + + async getTablesNames(schemaName) { + const result = await db.query(queryConstants.GET_TABLE_NAMES, [schemaName]); + + const tableTypesToExclude = ['FOREIGN TABLE']; + + return result.rows + .filter(({ table_type }) => !_.includes(tableTypesToExclude, table_type)) + .map(({ table_name, table_type }) => { + if (this._isView(table_type)) { + return `${table_name} (v)`; + } else { + return table_name; + } + }); + }, + + _isView(table_type) { + return table_type === 'VIEW'; + }, + + _isSystemSchema(schema_name) { + if (_.startsWith(schema_name, 'pg_')) { + return true; + } + + if (_.includes(['information_schema'], schema_name)) { + return true; + } + + return false; + }, +}; diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js new file mode 100644 index 0000000..26948e7 --- /dev/null +++ b/reverse_engineering/helpers/queryConstants.js @@ -0,0 +1,5 @@ +module.exports = { + PING: 'SELECT schema_name FROM information_schema.schemata LIMIT 1;', + GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;', + GET_TABLE_NAMES: 'SELECT * FROM information_schema.tables WHERE table_schema = $1 ORDER BY table_name;', +}; From 8b98b191afd1aa5ceaf6010f1cf67edc57ad1fd0 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 22 Sep 2021 11:13:03 +0300 Subject: [PATCH 03/69] Fixed configs for json types --- 
properties_pane/defaultData.json | 35 +++------------------------- types/char.json | 39 -------------------------------- types/json.json | 27 ++++++++++++++++++++++ types/object.json | 30 ++++++++++++++++++++++++ 4 files changed, 60 insertions(+), 71 deletions(-) create mode 100644 types/object.json diff --git a/properties_pane/defaultData.json b/properties_pane/defaultData.json index 6171a1b..c1bd62d 100644 --- a/properties_pane/defaultData.json +++ b/properties_pane/defaultData.json @@ -5,47 +5,18 @@ "dbVendor": "PostgreSQL" }, "container": { - "name": "New schema", - "indexes": [] + "name": "New schema" }, "collection": { "collectionName": "New table", "memory_optimized": false, "collectionUsers": [], - "collation": {}, - "chkConstr": { - "constrCheck": true, - "constrEnforceUpserts": true, - "constrEnforceReplication": true - }, - "Indxs": { - "indxType": "Index", - "ALLOW_ROW_LOCKS": true, - "ALLOW_PAGE_LOCKS": true - } + "collation": {} }, "field": { "name": "New column", - "signed": true, "primaryKey": false, - "unique": false, - "hasMaxLength": { - "valueDependencies": [ - { - "value": true, - "dependency": { - "type": "or", - "values": [{ - "key": "subtype", - "value": "object" - }, { - "key": "subtype", - "value": "array" - }] - } - } - ] - } + "unique": false }, "patternField": { "name": "^[a-zA-Z0-9_.-]+$" diff --git a/types/char.json b/types/char.json index 19c541c..5ef17cd 100644 --- a/types/char.json +++ b/types/char.json @@ -6,14 +6,6 @@ "useSample": true, "default": true, "hiddenOnEntity": "view", - "jsonType": { - "order": 1, - "jsonRoot": true, - "source": { - "key": "synonym", - "value": "json" - } - }, "defaultValues": { "primaryKey": false, "relationshipType": "", @@ -21,42 +13,11 @@ "childRelationships": [], "foreignCollection": "", "foreignField": [], - "default": "", - "minLength": "", - "maxLength": "", - "pattern": "", "enum": [], "sample":"", "comments":"", "mode": "varchar", "length": 10, "subtype": "string" - }, - "subtypes": { - 
"object": { - "parentType": "jsonObject", - "childValueType": [ - "jsonString", - "jsonNumber", - "jsonObject", - "jsonArray", - "jsonBoolean", - "jsonNull" - ] - }, - "array": { - "parentType": "jsonArray", - "childValueType": [ - "jsonString", - "jsonNumber", - "jsonObject", - "jsonArray", - "jsonBoolean", - "jsonNull" - ] - }, - "string": { - "parentType": "string" - } } } \ No newline at end of file diff --git a/types/json.json b/types/json.json index a0a5b64..7262d19 100644 --- a/types/json.json +++ b/types/json.json @@ -18,5 +18,32 @@ "maxProperties": "", "additionalProperties": false, "enum": [] + }, + "subtypes": { + "object": { + "parentType": "jsonObject", + "childValueType": [ + "jsonString", + "jsonNumber", + "jsonObject", + "jsonArray", + "jsonBoolean", + "jsonNull" + ] + }, + "array": { + "parentType": "jsonArray", + "childValueType": [ + "jsonString", + "jsonNumber", + "jsonObject", + "jsonArray", + "jsonBoolean", + "jsonNull" + ] + }, + "string": { + "parentType": "string" + } } } \ No newline at end of file diff --git a/types/object.json b/types/object.json new file mode 100644 index 0000000..4693ba1 --- /dev/null +++ b/types/object.json @@ -0,0 +1,30 @@ +{ + "name": "object", + "parentType": "document", + "structureType": true, + "defaultValues": { + "subtype": "object", + "properties": [] + }, + "subtypes": { + "object": { + "childValueType": [ + "char", + "numeric", + "binary", + "datetime", + "boolean", + "range", + "enum", + "geometry", + "inet", + "uuid", + "oid", + "xml", + "json", + "reference", + "multiple" + ] + } + } +} \ No newline at end of file From b7dd3f187f88d58ecff9173469d666ba9c579829 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 24 Sep 2021 11:52:20 +0300 Subject: [PATCH 04/69] RE: added retrieving of table properties --- .../entity_level/entityLevelConfig.json | 2 +- reverse_engineering/api.js | 32 ++++- .../helpers/postgresService.js | 133 ++++++++++++++++-- reverse_engineering/helpers/queryConstants.js | 8 ++ 4 
files changed, 155 insertions(+), 20 deletions(-) diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index bb4f852..80b9470 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -247,7 +247,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Autovacuum parameters", - "propertyKeyword": "Autovacuum", + "propertyKeyword": "autovacuum", "propertyType": "block", "propertyTooltip": "Vacuum parameters", "structure": [ diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 65fbe8f..0fd623f 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -36,12 +36,12 @@ module.exports = { logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); const postgresLogger = createLogger({ - title: 'Test connection log', + title: 'Get DB collections names', hiddenKeys: connectionInfo.hiddenKeys, logger, }); - await postgresService.setDependencies(app); + postgresService.setDependencies(app); await postgresService.connect(connectionInfo, postgresLogger); const schemasNames = await postgresService.getAllSchemasNames(); @@ -71,12 +71,36 @@ module.exports = { callback(null, collections); } catch (error) { callback(prepareError(error)); - } finally { await postgresService.disconnect(); } }, - async getDbCollectionsData(data, logger, callback, app) {}, + async getDbCollectionsData(data, logger, callback, app) { + try { + logger.log('info', data, 'Retrieve tables data:', data.hiddenKeys); + + const postgresLogger = createLogger({ + title: 'Get DB collections data log', + hiddenKeys: data.hiddenKeys, + logger, + }); + + postgresLogger.progress('Start reverse engineering...'); + + const collections = data.collectionData.collections; + const schemasNames = data.collectionData.dataBaseNames; + + await Promise.all( + schemasNames.map(schemaName => 
postgresService.retrieveEntitiesData(schemaName, collections[schemaName])) + ); + + callback(null, collections); + } catch (error) { + callback(prepareError(error)); + } finally { + await postgresService.disconnect(); + } + }, }; const createLogger = ({ title, logger, hiddenKeys }) => { diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index fd38c0e..647c1fa 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -4,6 +4,9 @@ const queryConstants = require('./queryConstants'); let currentSshTunnel = null; let _ = null; +let logger = null; + +const VIEW_SUFFIX = ' (v)'; module.exports = { setDependencies(app) { @@ -15,6 +18,7 @@ module.exports = { db.initializePool(pool, logger); currentSshTunnel = sshTunnel; + logger = logger; }, async disconnect() { @@ -33,9 +37,7 @@ module.exports = { async getAllSchemasNames() { const result = await db.query(queryConstants.GET_SCHEMA_NAMES); - return result.rows - .map(({ schema_name }) => schema_name) - .filter(schemaName => !this._isSystemSchema(schemaName)); + return result.rows.map(({ schema_name }) => schema_name).filter(schemaName => !isSystemSchema(schemaName)); }, async getTablesNames(schemaName) { @@ -46,27 +48,128 @@ module.exports = { return result.rows .filter(({ table_type }) => !_.includes(tableTypesToExclude, table_type)) .map(({ table_name, table_type }) => { - if (this._isView(table_type)) { - return `${table_name} (v)`; + if (isViewByTableType(table_type)) { + return `${table_name}${VIEW_SUFFIX}`; } else { return table_name; } }); }, - _isView(table_type) { - return table_type === 'VIEW'; + async retrieveEntitiesData(schemaName, entitiesNames) { + const schemaOidResult = await db.query(queryConstants.GET_NAMESPACE_OID, [schemaName]); + const schemaOid = _.first(schemaOidResult.rows).oid; + + const [viewsNames, tablesNames] = _.partition(entitiesNames, isViewByName); + + debugger; + return 
Promise.all(_.map(tablesNames, _.partial(this._retrieveSingleTableData, schemaOid))); }, - _isSystemSchema(schema_name) { - if (_.startsWith(schema_name, 'pg_')) { - return true; - } + async _retrieveSingleTableData(schemaOid, tableName) { + const result = await db.query(queryConstants.GET_TABLE_LEVEL_DATA, [tableName, schemaOid]); - if (_.includes(['information_schema'], schema_name)) { - return true; - } + const rawTableData = _.first(result.rows); + + const temporary = rawTableData.relpersistence === 't'; + const unlogged = rawTableData.relpersistence === 'u'; + const storage_parameter = prepareStorageParameters(rawTableData.reloptions); + const table_tablespace_name = result.spcname; - return false; + const tableDate = { + temporary, + unlogged, + storage_parameter, + table_tablespace_name, + }; + + return clearEmptyPropertiesInObject(tableDate); }, }; + +const isViewByTableType = table_type => table_type === 'VIEW'; +const isViewByName = name => _.endsWith(name, VIEW_SUFFIX); + +const isSystemSchema = schema_name => { + if (_.startsWith(schema_name, 'pg_')) { + return true; + } + + if (_.includes(['information_schema'], schema_name)) { + return true; + } + + return false; +}; + +const prepareStorageParameters = reloptions => { + if (!reloptions) { + return null; + } + + const options = _.fromPairs(_.map(reloptions, splitByEqualitySymbol)); + + const fillfactor = options.fillfactor; + const parallel_workers = options.parallel_workers; + const autovacuum_enabled = options.autovacuum_enabled; + const autovacuum = { + vacuum_index_cleanup: options.vacuum_index_cleanup, + vacuum_truncate: options.vacuum_truncate, + autovacuum_vacuum_threshold: options.autovacuum_vacuum_threshold, + autovacuum_vacuum_scale_factor: options.autovacuum_vacuum_scale_factor, + autovacuum_vacuum_insert_threshold: options.autovacuum_vacuum_insert_threshold, + autovacuum_vacuum_insert_scale_factor: options.autovacuum_vacuum_insert_scale_factor, + autovacuum_analyze_threshold: 
options.autovacuum_analyze_threshold, + autovacuum_analyze_scale_factor: options.autovacuum_analyze_scale_factor, + autovacuum_vacuum_cost_delay: options.autovacuum_vacuum_cost_delay, + autovacuum_vacuum_cost_limit: options.autovacuum_vacuum_cost_limit, + autovacuum_freeze_min_age: options.autovacuum_freeze_min_age, + autovacuum_freeze_max_age: options.autovacuum_freeze_max_age, + autovacuum_freeze_table_age: options.autovacuum_freeze_table_age, + autovacuum_multixact_freeze_min_age: options.autovacuum_multixact_freeze_min_age, + autovacuum_multixact_freeze_max_age: options.autovacuum_multixact_freeze_max_age, + autovacuum_multixact_freeze_table_age: options.autovacuum_multixact_freeze_table_age, + log_autovacuum_min_duration: options.log_autovacuum_min_duration, + }; + const user_catalog_table = options.user_catalog_table; + const toast_autovacuum_enabled = options['toast.autovacuum_enabled']; + const toast = { + toast_tuple_target: options.toast_tuple_target, + toast_vacuum_index_cleanup: options['toast.vacuum_index_cleanup'], + toast_vacuum_truncate: options['toast.vacuum_truncate'], + toast_autovacuum_vacuum_threshold: options['toast.autovacuum_vacuum_threshold'], + toast_autovacuum_vacuum_scale_factor: options['toast.autovacuum_vacuum_scale_factor'], + toast_autovacuum_vacuum_insert_threshold: options['toast.autovacuum_vacuum_insert_threshold'], + toast_autovacuum_vacuum_insert_scale_factor: options['toast.autovacuum_vacuum_insert_scale_factor'], + toast_autovacuum_vacuum_cost_delay: options['toast.autovacuum_vacuum_cost_delay'], + toast_autovacuum_vacuum_cost_limit: options['toast.autovacuum_vacuum_cost_limit'], + toast_autovacuum_freeze_min_age: options['toast.autovacuum_freeze_min_age'], + toast_autovacuum_freeze_max_age: options['toast.autovacuum_freeze_max_age'], + toast_autovacuum_freeze_table_age: options['toast.autovacuum_freeze_table_age'], + toast_autovacuum_multixact_freeze_min_age: options['toast.autovacuum_multixact_freeze_min_age'], + 
toast_autovacuum_multixact_freeze_max_age: options['toast.autovacuum_multixact_freeze_max_age'], + toast_autovacuum_multixact_freeze_table_age: options['toast.autovacuum_multixact_freeze_table_age'], + toast_log_autovacuum_min_duration: options['toast.log_autovacuum_min_duration'], + }; + + const storage_parameter = { + fillfactor, + parallel_workers, + autovacuum_enabled, + autovacuum: clearEmptyPropertiesInObject(autovacuum), + toast_autovacuum_enabled, + toast: clearEmptyPropertiesInObject(toast), + user_catalog_table, + }; + + return clearEmptyPropertiesInObject(storage_parameter); +}; + +const splitByEqualitySymbol = item => _.split(item, '='); + +const clearEmptyPropertiesInObject = obj => + _.chain(obj) + .toPairs() + .filter(([key, value]) => Boolean(value)) + .fromPairs() + .value(); diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 26948e7..74c0032 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -2,4 +2,12 @@ module.exports = { PING: 'SELECT schema_name FROM information_schema.schemata LIMIT 1;', GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;', GET_TABLE_NAMES: 'SELECT * FROM information_schema.tables WHERE table_schema = $1 ORDER BY table_name;', + GET_NAMESPACE_OID: 'SELECT oid FROM pg_namespace WHERE nspname = $1', + GET_TABLE_LEVEL_DATA: `SELECT pc.relpersistence, pc.reloptions, pt.spcname, ppt.partstrat, ppt.partattrs + FROM pg_class AS pc + LEFT JOIN pg_tablespace AS pt + ON pc.reltablespace = pt.oid + LEFT JOIN pg_partitioned_table AS ppt + ON pc.oid = ppt.partrelid + WHERE pc.relname = $1 AND pc.relnamespace = $2;`, }; From 1da7f8a7893d92237b063409dd3006c71660afc3 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 24 Sep 2021 11:57:29 +0300 Subject: [PATCH 05/69] RE: removed partition data from query --- reverse_engineering/helpers/queryConstants.js | 4 +--- 1 file changed, 1 insertion(+), 
3 deletions(-) diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 74c0032..0d6826c 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -3,11 +3,9 @@ module.exports = { GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;', GET_TABLE_NAMES: 'SELECT * FROM information_schema.tables WHERE table_schema = $1 ORDER BY table_name;', GET_NAMESPACE_OID: 'SELECT oid FROM pg_namespace WHERE nspname = $1', - GET_TABLE_LEVEL_DATA: `SELECT pc.relpersistence, pc.reloptions, pt.spcname, ppt.partstrat, ppt.partattrs + GET_TABLE_LEVEL_DATA: `SELECT pc.relpersistence, pc.reloptions, pt.spcname FROM pg_class AS pc LEFT JOIN pg_tablespace AS pt ON pc.reltablespace = pt.oid - LEFT JOIN pg_partitioned_table AS ppt - ON pc.oid = ppt.partrelid WHERE pc.relname = $1 AND pc.relnamespace = $2;`, }; From 725f8d4ef1e3febb027f0613304e9291225d74b2 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 28 Sep 2021 18:10:15 +0300 Subject: [PATCH 06/69] RE: added handling table properties and columns --- .../entity_level/entityLevelConfig.json | 620 ++++++++++-------- .../field_level/fieldLevelConfig.json | 22 +- reverse_engineering/api.js | 32 +- reverse_engineering/helpers/getJsonSchema.js | 14 + .../helpers/packageDataHelper.js | 0 .../helpers/postgresHelpers/columnHelper.js | 198 ++++++ .../helpers/postgresHelpers/common.js | 17 + .../helpers/postgresHelpers/tableHelper.js | 150 +++++ .../helpers/postgresService.js | 152 ++--- reverse_engineering/helpers/queryConstants.js | 40 +- types/char.json | 6 +- types/json.json | 9 +- 12 files changed, 875 insertions(+), 385 deletions(-) create mode 100644 reverse_engineering/helpers/getJsonSchema.js create mode 100644 reverse_engineering/helpers/packageDataHelper.js create mode 100644 reverse_engineering/helpers/postgresHelpers/columnHelper.js create mode 100644 
reverse_engineering/helpers/postgresHelpers/common.js create mode 100644 reverse_engineering/helpers/postgresHelpers/tableHelper.js diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 80b9470..d5688f7 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -112,7 +112,6 @@ making sure that you maintain a proper JSON format. } */ - [ { "lowerTab": "Details", @@ -177,15 +176,18 @@ making sure that you maintain a proper JSON format. "abbr": "pk,PK", "dependency": { "type": "or", - "values": [{ - "key": "partitioning_expression", - "value": false - }, { - "key": "partitioning_expression", - "exists": false - }] + "values": [ + { + "key": "partitioning_expression", + "value": false + }, + { + "key": "partitioning_expression", + "exists": false + } + ] } - }, + }, { "propertyName": "Expression", "propertyKeyword": "partitioning_expression", @@ -195,18 +197,21 @@ making sure that you maintain a proper JSON format. "markdown": false, "dependency": { "type": "or", - "values": [{ - "key": "compositePartitionKey", - "value": false - }, { - "key": "compositePartitionKey", - "exists": false - }] + "values": [ + { + "key": "compositePartitionKey", + "value": false + }, + { + "key": "compositePartitionKey", + "exists": false + } + ] } } ] }, - { + { "propertyName": "Using method", "propertyKeyword": "method", "propertyTooltip": "Optional clause to specify the table access method to use to store the contents for the new table; the method needs be an access method of type TABLE.", @@ -638,7 +643,10 @@ making sure that you maintain a proper JSON format. "valueType": "string" } ], - "columnsRatio": [3.7, 5] + "columnsRatio": [ + 3.7, + 5 + ] }, { "lowerTab": "Composite keys", @@ -691,7 +699,7 @@ making sure that you maintain a proper JSON format. 
"template": "textarea" } ] - }, + }, { "propertyName": "Unique key", "propertyType": "group", @@ -744,281 +752,327 @@ making sure that you maintain a proper JSON format. }, { "lowerTab": "Indexes", - "structure": [{ - "propertyName": "Index", - "propertyType": "group", - "propertyKeyword": "Indxs", - "propertyTooltip": "In general you should only add indexes to match the queries your application uses. Any extra will waste resources.", - "structure": [ - { - "propertyName": "Name", - "propertyKeyword": "indxName", - "propertyTooltip": "Optional, if not specified an automatic name will be assigned. Index name are needed to drop indexes and appear in error messages when a constraint is violated.", - "propertyType": "text" - }, - { - "propertyName": "Activated", - "propertyKeyword": "isActivated", - "propertyTooltip": "Deactivated item will be not included in FE script", - "propertyType": "checkbox", - "defaultValue": true - }, - { - "propertyName": "Method", - "propertyKeyword": "index_method", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "btree", - "options": [ - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Unique", - "propertyKeyword": "unique", - "propertyTooltip": "Causes the system to check for duplicate values in the table when the index is created (if data already exist) and each time data is added. 
Attempts to insert or update data which would result in duplicate entries will generate an error.", - "defaultValue": false, - "propertyType": "checkbox", - "dependency": { - "type": "or", - "values": [{ - "key": "index_method", - "value": "btree" - }] - } - }, - { - "propertyName": "Concurrent build", - "propertyKeyword": "concurrently", - "propertyTooltip": "When this option is used, PostgreSQL will build the index without taking any locks that prevent concurrent inserts, updates, or deletes on the table; whereas a standard index build locks out writes (but not reads) on the table until it's done.", - "defaultValue": false, - "propertyType": "checkbox" - }, - { - "propertyName": "If not exist", - "propertyKeyword": "ifNotExist", - "propertyTooltip": "The index will only be created if an index with the same name does not already exist. If the index already exists, then a warning will be triggered by default.", - "defaultValue": true, - "propertyType": "checkbox" - }, - { - "propertyName": "Only", - "propertyKeyword": "only", - "propertyTooltip": "Indicates not to recurse creating indexes on partitions, if the table is partitioned.", - "defaultValue": true, - "propertyType": "checkbox" - }, - { - "propertyName": "Columns", - "propertyKeyword": "columns", - "propertyType": "fieldList", - "template": "orderedList", - "propertyTooltip": "The name of a column of the table.", - "attributeList": [ - "ASC", - "DESC" - ] - }, - { - "propertyName": "Include non-key columns", - "propertyKeyword": "include", - "propertyType": "fieldList", - "template": "orderedList", - "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the index as non-key columns.", - "dependency": { - "type": "or", - "values": [{ - "key": "index_method", - "value": "btree" - }, { - "key": "index_method", - "value": "gist" - }] - } - }, - { - "propertyName": "With storage parameters", - "propertyKeyword": "index_storage_parameter", - "propertyType": "block", 
- "propertyTooltip": "The optional WITH clause specifies storage parameters for the index. Each index method has its own set of allowed storage parameters.", - "structure": [ - { - "propertyName": "Fill factor", - "propertyKeyword": "index_fillfactor", - "propertyType": "numeric", - "valueType": "number", - "propertyTooltip": "A percentage between 10 and 100. The fillfactor for an index is a percentage that determines how full the index method will try to pack index pages.", - "minValue": 10, - "maxValue": 100, - "step": 1, - "defaultValue": 100 - }, - { - "propertyName": "Deduplicate items", - "propertyKeyword": "deduplicate_items", - "propertyType": "checkbox", - "propertyTooltip": "Declare the table as an additional catalog table for purposes of logical replication.", - "defaultValue": true, - "dependency": { - "type": "or", - "values": [{ + "structure": [ + { + "propertyName": "Index", + "propertyType": "group", + "propertyKeyword": "Indxs", + "propertyTooltip": "In general you should only add indexes to match the queries your application uses. Any extra will waste resources.", + "structure": [ + { + "propertyName": "Name", + "propertyKeyword": "indxName", + "propertyTooltip": "Optional, if not specified an automatic name will be assigned. 
Index name are needed to drop indexes and appear in error messages when a constraint is violated.", + "propertyType": "text" + }, + { + "propertyName": "Activated", + "propertyKeyword": "isActivated", + "propertyTooltip": "Deactivated item will be not included in FE script", + "propertyType": "checkbox", + "defaultValue": true + }, + { + "propertyName": "Method", + "propertyKeyword": "index_method", + "propertyTooltip": "", + "propertyType": "select", + "defaultValue": "btree", + "options": [ + "btree", + "hash", + "gist", + "spgist", + "gin", + "brin" + ] + }, + { + "propertyName": "Unique", + "propertyKeyword": "unique", + "propertyTooltip": "Causes the system to check for duplicate values in the table when the index is created (if data already exist) and each time data is added. Attempts to insert or update data which would result in duplicate entries will generate an error.", + "defaultValue": false, + "propertyType": "checkbox", + "dependency": { + "type": "or", + "values": [ + { "key": "index_method", "value": "btree" - }] + } + ] + } + }, + { + "propertyName": "Concurrent build", + "propertyKeyword": "concurrently", + "propertyTooltip": "When this option is used, PostgreSQL will build the index without taking any locks that prevent concurrent inserts, updates, or deletes on the table; whereas a standard index build locks out writes (but not reads) on the table until it's done.", + "defaultValue": false, + "propertyType": "checkbox" + }, + { + "propertyName": "If not exist", + "propertyKeyword": "ifNotExist", + "propertyTooltip": "The index will only be created if an index with the same name does not already exist. 
If the index already exists, then a warning will be triggered by default.", + "defaultValue": true, + "propertyType": "checkbox" + }, + { + "propertyName": "Only", + "propertyKeyword": "only", + "propertyTooltip": "Indicates not to recurse creating indexes on partitions, if the table is partitioned.", + "defaultValue": true, + "propertyType": "checkbox" + }, + { + "propertyName": "Columns", + "propertyKeyword": "columns", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The name of a column of the table.", + "attributeList": { + "sortOrder": { + "propertyType": "select", + "options": [ + "ASC", + "DESC" + ] + }, + "mullsOrder": { + "propertyType": "select", + "options": [ + "", + "NULLS FIRST", + "NULLS LAST" + ] + }, + "collation": { + "propertyType": "text", + "placeholder": "Collation" + }, + "opclass": { + "propertyType": "text", + "placeholder": "Opclass" } - }, - { - "propertyName": "Fast update", - "propertyKeyword": "fastupdate", - "propertyType": "checkbox", - "propertyTooltip": "This setting controls usage of the fast update technique.", - "defaultValue": true, - "dependency": { - "type": "or", - "values": [{ + } + }, + { + "propertyName": "Include non-key columns", + "propertyKeyword": "include", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the index as non-key columns.", + "dependency": { + "type": "or", + "values": [ + { "key": "index_method", - "value": "gin" - }] - } - }, - { - "propertyName": "Gin pending list limit", - "propertyKeyword": "gin_pending_list_limit", - "propertyType": "numeric", - "valueType": "number", - "propertyTooltip": "This value is specified in kilobytes. 
Sets the maximum size of a GIN index's pending list, which is used when fastupdate is enabled.", - "minValue": 0, - "step": 1, - "defaultValue": 4000, - "dependency": { - "type": "and", - "values": [{ + "value": "btree" + }, + { "key": "index_method", - "value": "gin" - }, { - "key": "fastupdate", - "value": true - }] + "value": "gist" + } + ] + } + }, + { + "propertyName": "With storage parameters", + "propertyKeyword": "index_storage_parameter", + "propertyType": "block", + "propertyTooltip": "The optional WITH clause specifies storage parameters for the index. Each index method has its own set of allowed storage parameters.", + "structure": [ + { + "propertyName": "Fill factor", + "propertyKeyword": "index_fillfactor", + "propertyType": "numeric", + "valueType": "number", + "propertyTooltip": "A percentage between 10 and 100. The fillfactor for an index is a percentage that determines how full the index method will try to pack index pages.", + "minValue": 10, + "maxValue": 100, + "step": 1, + "defaultValue": 100 + }, + { + "propertyName": "Deduplicate items", + "propertyKeyword": "deduplicate_items", + "propertyType": "checkbox", + "propertyTooltip": "Declare the table as an additional catalog table for purposes of logical replication.", + "defaultValue": true, + "dependency": { + "type": "or", + "values": [ + { + "key": "index_method", + "value": "btree" + } + ] + } + }, + { + "propertyName": "Fast update", + "propertyKeyword": "fastupdate", + "propertyType": "checkbox", + "propertyTooltip": "This setting controls usage of the fast update technique.", + "defaultValue": true, + "dependency": { + "type": "or", + "values": [ + { + "key": "index_method", + "value": "gin" + } + ] + } + }, + { + "propertyName": "Gin pending list limit", + "propertyKeyword": "gin_pending_list_limit", + "propertyType": "numeric", + "valueType": "number", + "propertyTooltip": "This value is specified in kilobytes. 
Sets the maximum size of a GIN index's pending list, which is used when fastupdate is enabled.", + "minValue": 0, + "step": 1, + "defaultValue": 4000, + "dependency": { + "type": "and", + "values": [ + { + "key": "index_method", + "value": "gin" + }, + { + "key": "fastupdate", + "value": true + } + ] + } + }, + { + "propertyName": "Pages per range", + "propertyKeyword": "pages_per_range", + "propertyType": "numeric", + "valueType": "number", + "propertyTooltip": "Defines the number of table blocks that make up one block range for each entry of a BRIN index.", + "minValue": 0, + "step": 1, + "defaultValue": 128, + "dependency": { + "type": "or", + "values": [ + { + "key": "index_method", + "value": "brin" + } + ] + } + }, + { + "propertyName": "Auto summarize", + "propertyKeyword": "autosummarize ", + "propertyType": "checkbox", + "propertyTooltip": "Defines whether a summarization run is invoked for the previous page range whenever an insertion is detected on the next one.", + "defaultValue": true, + "dependency": { + "type": "or", + "values": [ + { + "key": "index_method", + "value": "brin" + } + ] + } } - }, - { - "propertyName": "Pages per range", - "propertyKeyword": "pages_per_range", - "propertyType": "numeric", - "valueType": "number", - "propertyTooltip": "Defines the number of table blocks that make up one block range for each entry of a BRIN index.", - "minValue": 0, - "step": 1, - "defaultValue": 128, - "dependency": { - "type": "or", - "values": [{ + ], + "dependency": { + "type": "or", + "values": [ + { "key": "index_method", - "value": "brin" - }] - } - }, - { - "propertyName": "Auto summarize", - "propertyKeyword": "autosummarize ", - "propertyType": "checkbox", - "propertyTooltip": "Defines whether a summarization run is invoked for the previous page range whenever an insertion is detected on the next one.", - "defaultValue": true, - "dependency": { - "type": "or", - "values": [{ + "value": "btree" + }, + { "key": "index_method", - "value": "brin" - 
}] - } + "value": "hash" + }, + { + "key": "index_method", + "value": "gist" + }, + { + "key": "index_method", + "value": "spgist" + } + ] } - ], - "dependency": { - "type": "or", - "values": [{ - "key": "index_method", - "value": "btree" - }, { - "key": "index_method", - "value": "hash" - }, { - "key": "index_method", - "value": "gist" - }, { - "key": "index_method", - "value": "spgist" - }] + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "index_tablespace_name", + "propertyTooltip": "Enter the name of an existing tablespace location for the database, or pg_default", + "defaultValue": "pg_default", + "propertyType": "text" + }, + { + "propertyName": "Where constraint", + "propertyKeyword": "where", + "propertyTooltip": "Popup for multi-line text entry", + "propertyType": "details", + "markdown": false, + "template": "textarea" + }, + { + "propertyName": "Comment", + "propertyKeyword": "indexComment", + "propertyTooltip": "comment", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" } - }, - { - "propertyName": "Tablespace", - "propertyKeyword": "index_tablespace_name", - "propertyTooltip": "Enter the name of an existing tablespace location for the database, or pg_default", - "defaultValue": "pg_default", - "propertyType": "text" - }, - { - "propertyName": "Where constraint", - "propertyKeyword": "where", - "propertyTooltip": "Popup for multi-line text entry", - "propertyType": "details", - "markdown": false, - "template": "textarea" - }, - { - "propertyName": "Comment", - "propertyKeyword": "indexComment", - "propertyTooltip": "comment", - "addTimestampButton": false, - "propertyType": "details", - "template": "textarea" - } - ] - }] + ] + } + ] }, { "lowerTab": "Check Constraints", - "structure": [{ - "propertyName": "Check Constraint", - "propertyType": "group", - "propertyKeyword": "chkConstr", - "propertyTooltip": "", - "structure": [ - { - "propertyName": "Name", - "propertyKeyword": "chkConstrName", - 
"propertyTooltip": "", - "propertyType": "text" - }, - { - "propertyName": "Description", - "propertyKeyword": "constrDescription", - "propertyTooltip": "description", - "propertyType": "details", - "template": "textarea" - }, - { - "propertyName": "Expression", - "propertyKeyword": "constrExpression", - "propertyTooltip": "Expression", - "propertyType": "details", - "template": "textarea", - "markdown": false - }, - { - "propertyName": "Comments", - "propertyKeyword": "constrComments", - "propertyTooltip": "comments", - "addTimestampButton": false, - "propertyType": "details", - "template": "textarea" - } - ] - }] + "structure": [ + { + "propertyName": "Check Constraint", + "propertyType": "group", + "propertyKeyword": "chkConstr", + "propertyTooltip": "", + "structure": [ + { + "propertyName": "Name", + "propertyKeyword": "chkConstrName", + "propertyTooltip": "", + "propertyType": "text" + }, + { + "propertyName": "Description", + "propertyKeyword": "constrDescription", + "propertyTooltip": "description", + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Expression", + "propertyKeyword": "constrExpression", + "propertyTooltip": "Expression", + "propertyType": "details", + "template": "textarea", + "markdown": false + }, + { + "propertyName": "Comments", + "propertyKeyword": "constrComments", + "propertyTooltip": "comments", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" + } + ] + } + ] } -] +] \ No newline at end of file diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 05e2d3e..8f062e3 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -207,7 +207,7 @@ making sure that you maintain a proper JSON format. 
"propertyType": "numeric", "valueType": "number", "minValue": 0, - "step": 1, + "step": 1 } ] }, @@ -3095,6 +3095,26 @@ making sure that you maintain a proper JSON format. "data": "options", "valueType": "string" }, + { + "propertyName": "JSON Types", + "propertyKeyword": "subtype", + "propertyType": "select", + "options": [ + { + "name": " ", + "value": "string" + }, + { + "name": "object", + "value": "object" + }, + { + "name": "array", + "value": "array" + } + ], + "defaultValue": "object" + }, { "propertyName": "Comments", "propertyKeyword": "description", diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 0fd623f..213299c 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -24,6 +24,7 @@ module.exports = { await postgresService.pingDb(); callback(); } catch (error) { + logger.log('error', prepareError(error), 'Test connection instance log'); callback(prepareError(error)); } finally { await postgresService.disconnect(); @@ -70,6 +71,7 @@ module.exports = { callback(null, collections); } catch (error) { + logger.log('error', prepareError(error), 'Get DB collections names'); callback(prepareError(error)); await postgresService.disconnect(); } @@ -90,12 +92,34 @@ module.exports = { const collections = data.collectionData.collections; const schemasNames = data.collectionData.dataBaseNames; - await Promise.all( - schemasNames.map(schemaName => postgresService.retrieveEntitiesData(schemaName, collections[schemaName])) - ); + const packages = await Promise.all( + schemasNames.map(async schemaName => ({ + schemaName, + entities: await postgresService.retrieveEntitiesData( + schemaName, + collections[schemaName], + data.recordSamplingSettings + ), + })) + ).then(tablesDataPerSchema => { + return tablesDataPerSchema.flatMap(({ schemaName, entities }) => + entities.map(entityData => ({ + dbName: schemaName, + collectionName: entityData.name, + documents: entityData.documents, + views: [], + emptyBucket: false, + entityLevel: 
entityData.entityLevel, + validation: { + jsonSchema: entityData.jsonSchema, + }, + })) + ); + }); - callback(null, collections); + callback(null, packages); } catch (error) { + logger.log('error', prepareError(error), 'Retrieve tables data'); callback(prepareError(error)); } finally { await postgresService.disconnect(); diff --git a/reverse_engineering/helpers/getJsonSchema.js b/reverse_engineering/helpers/getJsonSchema.js new file mode 100644 index 0000000..c464ed7 --- /dev/null +++ b/reverse_engineering/helpers/getJsonSchema.js @@ -0,0 +1,14 @@ +const getJsonSchema = (columns) => { + const properties = columns.reduce((properties, column) => { + return { + ...properties, + [column.name]: column, + }; + }, {}); + + return { properties }; +}; + +module.exports = { + getJsonSchema, +}; diff --git a/reverse_engineering/helpers/packageDataHelper.js b/reverse_engineering/helpers/packageDataHelper.js new file mode 100644 index 0000000..e69de29 diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js new file mode 100644 index 0000000..5983799 --- /dev/null +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -0,0 +1,198 @@ +let _ = null; + +const setDependencies = app => { + _ = app.require('lodash'); +}; + +const columnPropertiesMapper = { + columns_default: 'default', + is_nullable: { + keyword: 'required', + values: { + YES: false, + NO: true, + }, + }, + data_type: 'type', + numeric_precision: 'precision', + numeric_scale: 'scale', + datetime_precision: 'timePrecision', + interval_type: 'intervalOptions', + collation_name: 'collationRule', + column_name: 'name', + attndims: 'numberOfArrayDimensions', + udt_name: 'udt_name', + character_maximum_length: 'length', +}; + +const mapColumnData = column => { + return _.chain(column) + .toPairs() + .map(([key, value]) => [ + columnPropertiesMapper[key]?.keyword || columnPropertiesMapper[key], + _.get(columnPropertiesMapper, 
`${key}.values.${value}`, value), + ]) + .filter(([key, value]) => key && !_.isNil(value)) + .fromPairs() + .thru(setColumnType) + .value(); +}; + +const setColumnType = column => ({ + ...column, + ...mapType(column.type), + ...getArrayType(column), +}); + +const getArrayType = column => { + if (column.type !== 'ARRAY') { + return {}; + } + + const typeData = mapType(column.udt_name.slice(1)); + + return { + ...typeData, + array_type: _.fill(Array(column.numberOfArrayDimensions), ''), + }; +}; + +const mapType = type => { + switch (type) { + case 'bigint': + case 'bigserial': + case 'smallint': + case 'integer': + case 'numeric': + case 'real': + case 'double precision': + case 'smallserial': + case 'serial': + case 'money': + return { type: 'numeric', mode: type }; + case 'int8': + return { type: 'numeric', mode: 'bigint' }; + case 'int2': + return { type: 'numeric', mode: 'smallint' }; + case 'int4': + return { type: 'numeric', mode: 'integer' }; + case 'float4': + return { type: 'numeric', mode: 'real' }; + case 'float8': + return { type: 'numeric', mode: 'double precision' }; + case 'bit': + case 'char': + case 'text': + return { type: 'char', mode: type }; + case 'bit varying': + return { type: 'char', mode: 'varbit' }; + case 'character': + return { type: 'char', mode: 'char' }; + case 'character varying': + return { type: 'char', mode: 'varchar' }; + case 'bpchar': + return { type: 'char', mode: 'char' }; + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + return { type: 'geometry', mode: type }; + case 'bytea': + return { type: 'binary', mode: type }; + case 'inet': + case 'cidr': + case 'macaddr': + case 'macaddr8': + return { type: 'inet', mode: type }; + case 'date': + case 'time': + case 'timestamp': + case 'interval': + return { type: 'datetime', mode: type }; + case 'timestamptz': + case 'timestamp with time zone': + return { type: 'datetime', mode: 'timestamp', with_timezone: true }; + case 
'timetz': + case 'time with time zone': + return { type: 'datetime', mode: 'time', with_timezone: true }; + case 'json': + case 'jsonb': + return { type: 'json', mode: type, subtype: 'object' }; + case 'int4range': + case 'int8range': + case 'numrange': + case 'daterange': + case 'tsrange': + case 'tstzrange': + return { type: 'range', mode: type }; + case 'uuid': + case 'xml': + case 'boolean': + return { type }; + case 'bool': + return { type: 'boolean' }; + case 'oid': + case 'regclass': + case 'regcollation': + case 'regconfig': + case 'regdictionary': + case 'regnamespace': + case 'regoper': + case 'regoperator': + case 'regproc': + case 'regprocedure': + case 'regrole': + case 'regtype': + return { type: 'oid', mode: type }; + default: + return { type }; + } +}; + +const setSubtypeFromSampledJsonValues = (columns, documents) => { + const sampleDocument = _.first(documents) || {}; + + return columns.map(column => { + if (column.type !== 'json') { + return column; + } + + const sampleValue = sampleDocument[column.name]; + const parsedValue = safeParse(sampleValue); + const jsonType = getParsedJsonValueType(parsedValue); + + return { + ...column, + synonym: jsonType, + }; + }); +}; + +const safeParse = json => { + try { + return JSON.parse(json); + } catch (error) { + return {}; + } +}; + +const getParsedJsonValueType = value => { + if (Array.isArray(value)) { + return 'array'; + } + + if (typeof value === 'object') { + return 'object'; + } + + return ''; +}; + +module.exports = { + setDependencies, + mapColumnData, + setSubtypeFromSampledJsonValues, +}; diff --git a/reverse_engineering/helpers/postgresHelpers/common.js b/reverse_engineering/helpers/postgresHelpers/common.js new file mode 100644 index 0000000..9ae07c5 --- /dev/null +++ b/reverse_engineering/helpers/postgresHelpers/common.js @@ -0,0 +1,17 @@ +let _ = null; + +const setDependencies = app => { + _ = app.require('lodash'); +}; + +const clearEmptyPropertiesInObject = obj => + _.chain(obj) + 
.toPairs() + .filter(([key, value]) => Boolean(value)) + .fromPairs() + .value(); + +module.exports = { + clearEmptyPropertiesInObject, + setDependencies, +}; diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js new file mode 100644 index 0000000..cb3cdec --- /dev/null +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -0,0 +1,150 @@ +const { clearEmptyPropertiesInObject } = require('./common'); + +let _ = null; + +const setDependencies = app => { + _ = app.require('lodash'); +}; + +const prepareStorageParameters = reloptions => { + if (!reloptions) { + return null; + } + + const options = _.fromPairs(_.map(reloptions, splitByEqualitySymbol)); + + const fillfactor = options.fillfactor; + const parallel_workers = options.parallel_workers; + const autovacuum_enabled = options.autovacuum_enabled; + const autovacuum = { + vacuum_index_cleanup: options.vacuum_index_cleanup, + vacuum_truncate: options.vacuum_truncate, + autovacuum_vacuum_threshold: options.autovacuum_vacuum_threshold, + autovacuum_vacuum_scale_factor: options.autovacuum_vacuum_scale_factor, + autovacuum_vacuum_insert_threshold: options.autovacuum_vacuum_insert_threshold, + autovacuum_vacuum_insert_scale_factor: options.autovacuum_vacuum_insert_scale_factor, + autovacuum_analyze_threshold: options.autovacuum_analyze_threshold, + autovacuum_analyze_scale_factor: options.autovacuum_analyze_scale_factor, + autovacuum_vacuum_cost_delay: options.autovacuum_vacuum_cost_delay, + autovacuum_vacuum_cost_limit: options.autovacuum_vacuum_cost_limit, + autovacuum_freeze_min_age: options.autovacuum_freeze_min_age, + autovacuum_freeze_max_age: options.autovacuum_freeze_max_age, + autovacuum_freeze_table_age: options.autovacuum_freeze_table_age, + autovacuum_multixact_freeze_min_age: options.autovacuum_multixact_freeze_min_age, + autovacuum_multixact_freeze_max_age: options.autovacuum_multixact_freeze_max_age, + 
autovacuum_multixact_freeze_table_age: options.autovacuum_multixact_freeze_table_age, + log_autovacuum_min_duration: options.log_autovacuum_min_duration, + }; + const user_catalog_table = options.user_catalog_table; + const toast_autovacuum_enabled = options['toast.autovacuum_enabled']; + const toast = { + toast_tuple_target: options.toast_tuple_target, + toast_vacuum_index_cleanup: options['toast.vacuum_index_cleanup'], + toast_vacuum_truncate: options['toast.vacuum_truncate'], + toast_autovacuum_vacuum_threshold: options['toast.autovacuum_vacuum_threshold'], + toast_autovacuum_vacuum_scale_factor: options['toast.autovacuum_vacuum_scale_factor'], + toast_autovacuum_vacuum_insert_threshold: options['toast.autovacuum_vacuum_insert_threshold'], + toast_autovacuum_vacuum_insert_scale_factor: options['toast.autovacuum_vacuum_insert_scale_factor'], + toast_autovacuum_vacuum_cost_delay: options['toast.autovacuum_vacuum_cost_delay'], + toast_autovacuum_vacuum_cost_limit: options['toast.autovacuum_vacuum_cost_limit'], + toast_autovacuum_freeze_min_age: options['toast.autovacuum_freeze_min_age'], + toast_autovacuum_freeze_max_age: options['toast.autovacuum_freeze_max_age'], + toast_autovacuum_freeze_table_age: options['toast.autovacuum_freeze_table_age'], + toast_autovacuum_multixact_freeze_min_age: options['toast.autovacuum_multixact_freeze_min_age'], + toast_autovacuum_multixact_freeze_max_age: options['toast.autovacuum_multixact_freeze_max_age'], + toast_autovacuum_multixact_freeze_table_age: options['toast.autovacuum_multixact_freeze_table_age'], + toast_log_autovacuum_min_duration: options['toast.log_autovacuum_min_duration'], + }; + + const storage_parameter = { + fillfactor, + parallel_workers, + autovacuum_enabled, + autovacuum: clearEmptyPropertiesInObject(autovacuum), + toast_autovacuum_enabled, + toast: clearEmptyPropertiesInObject(toast), + user_catalog_table, + }; + + return clearEmptyPropertiesInObject(storage_parameter); +}; + +const prepareTablePartition = 
(partitionResult, tableAttributesWithPositions) => { + if (!partitionResult) { + return null; + } + + const partitionType = getPartitionType(partitionResult); + const isExpression = _.some(partitionResult.partition_attributes_positions, position => position === 0); + const key = isExpression ? 'partitioning_expression' : 'compositePartitionKey'; + const value = isExpression + ? getPartitionExpression(partitionResult, tableAttributesWithPositions) + : _.map( + partitionResult.partition_attributes_positions, + getAttributeNameByPosition(tableAttributesWithPositions) + ); + + return { + partitionType, + [key]: value, + }; +}; + +const getPartitionType = partitionResult => { + const type = partitionResult.partition_type; + + switch (type) { + case 'h': + return 'HASH'; + case 'l': + return 'LIST'; + case 'r': + return 'RANGE'; + default: + return ''; + } +}; + +const getPartitionExpression = (partitionResult, tableAttributesWithPositions) => { + let expressionIndex = 0; + const expressions = _.split(partitionResult.expressions, ','); + + return _.chain(partitionResult.partition_attributes_positions) + .map(attributePosition => { + if (attributePosition === 0) { + const expression = expressions[expressionIndex]; + expressionIndex++; + + return expression; + } + + return getAttributeNameByPosition(tableAttributesWithPositions)(attributePosition); + }) + .join(',') + .value(); +}; + +const getAttributeNameByPosition = attributes => position => _.find(attributes, { position })?.name; + +const splitByEqualitySymbol = item => _.split(item, '='); + +const checkHaveJsonTypes = columns => { + return _.find(columns, { type: 'json' }); +}; + +const getLimit = (count, recordSamplingSettings) => { + const per = recordSamplingSettings.relative.value; + const size = + recordSamplingSettings.active === 'absolute' + ? 
recordSamplingSettings.absolute.value + : Math.round((count / 100) * per); + return size; +}; + +module.exports = { + prepareStorageParameters, + prepareTablePartition, + setDependencies, + checkHaveJsonTypes, + getLimit, +}; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 647c1fa..2c0a225 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -1,5 +1,22 @@ const { createConnectionPool } = require('./connectionHelper'); const db = require('./db'); +const { getJsonSchema } = require('./getJsonSchema'); +const { + setDependencies: setDependenciesInColumnHelper, + mapColumnData, + setSubtypeFromSampledJsonValues, +} = require('./postgresHelpers/columnHelper'); +const { + setDependencies: setDependenciesInCommonHelper, + clearEmptyPropertiesInObject, +} = require('./postgresHelpers/common'); +const { + setDependencies: setDependenciesInTableHelper, + prepareStorageParameters, + prepareTablePartition, + checkHaveJsonTypes, + getLimit, +} = require('./postgresHelpers/tableHelper'); const queryConstants = require('./queryConstants'); let currentSshTunnel = null; @@ -11,6 +28,9 @@ const VIEW_SUFFIX = ' (v)'; module.exports = { setDependencies(app) { _ = app.require('lodash'); + setDependenciesInCommonHelper(app); + setDependenciesInTableHelper(app); + setDependenciesInColumnHelper(app); }, async connect(connectionInfo, logger) { @@ -46,7 +66,10 @@ module.exports = { const tableTypesToExclude = ['FOREIGN TABLE']; return result.rows - .filter(({ table_type }) => !_.includes(tableTypesToExclude, table_type)) + .filter( + ({ table_type, is_table_partitioned }) => + !_.includes(tableTypesToExclude, table_type) && !is_table_partitioned + ) .map(({ table_name, table_type }) => { if (isViewByTableType(table_type)) { return `${table_name}${VIEW_SUFFIX}`; @@ -56,34 +79,73 @@ module.exports = { }); }, - async retrieveEntitiesData(schemaName, entitiesNames) { 
+ async retrieveEntitiesData(schemaName, entitiesNames, recordSamplingSettings) { const schemaOidResult = await db.query(queryConstants.GET_NAMESPACE_OID, [schemaName]); - const schemaOid = _.first(schemaOidResult.rows).oid; + const schemaOid = getFirstRow(schemaOidResult).oid; const [viewsNames, tablesNames] = _.partition(entitiesNames, isViewByName); - debugger; - return Promise.all(_.map(tablesNames, _.partial(this._retrieveSingleTableData, schemaOid))); + return Promise.all( + _.map( + tablesNames, + _.bind(this._retrieveSingleTableData, this, recordSamplingSettings, schemaOid, schemaName) + ) + ); }, - async _retrieveSingleTableData(schemaOid, tableName) { + async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, tableName) { const result = await db.query(queryConstants.GET_TABLE_LEVEL_DATA, [tableName, schemaOid]); + const rawTableData = getFirstRow(result); + const tableOid = rawTableData.oid; + const partitionResult = getFirstRow(await db.query(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid])); + const tableAttributes = (await db.query(queryConstants.GET_TABLE_ATTRIBUTES_WITH_POSITIONS, [tableOid])).rows; + const descriptionResult = await db.query(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid]); - const rawTableData = _.first(result.rows); + const partitioning = prepareTablePartition(partitionResult, tableAttributes); const temporary = rawTableData.relpersistence === 't'; const unlogged = rawTableData.relpersistence === 'u'; const storage_parameter = prepareStorageParameters(rawTableData.reloptions); const table_tablespace_name = result.spcname; + const description = getFirstRow(descriptionResult); - const tableDate = { + const tableData = { temporary, unlogged, storage_parameter, table_tablespace_name, + partitioning, + description, }; - return clearEmptyPropertiesInObject(tableDate); + const entityLevel = clearEmptyPropertiesInObject(tableData); + + const columns = await db.query(queryConstants.GET_TABLE_COLUMNS, [tableName, 
schemaName, tableOid]); + let targetAttributes = columns.rows.map(mapColumnData); + + const hasJsonTypes = checkHaveJsonTypes(targetAttributes); + let documents = []; + + if (hasJsonTypes) { + documents = await this._getDocuments(schemaName, tableName, recordSamplingSettings); + targetAttributes = setSubtypeFromSampledJsonValues(targetAttributes, documents); + } + + return { + name: tableName, + entityLevel, + jsonSchema: getJsonSchema(targetAttributes), + documents, + }; + }, + + async _getDocuments(schemaName, tableName, recordSamplingSettings) { + const fullTableName = `${schemaName}.${tableName}`; + const quantity = await db.query(queryConstants.GET_ROWS_COUNT(fullTableName)); + const limit = getLimit(quantity, recordSamplingSettings); + const sampledDocs = await db.query(queryConstants.GET_SAMPLED_DATA(fullTableName), [limit]); + + return sampledDocs.rows; }, }; @@ -102,74 +164,4 @@ const isSystemSchema = schema_name => { return false; }; -const prepareStorageParameters = reloptions => { - if (!reloptions) { - return null; - } - - const options = _.fromPairs(_.map(reloptions, splitByEqualitySymbol)); - - const fillfactor = options.fillfactor; - const parallel_workers = options.parallel_workers; - const autovacuum_enabled = options.autovacuum_enabled; - const autovacuum = { - vacuum_index_cleanup: options.vacuum_index_cleanup, - vacuum_truncate: options.vacuum_truncate, - autovacuum_vacuum_threshold: options.autovacuum_vacuum_threshold, - autovacuum_vacuum_scale_factor: options.autovacuum_vacuum_scale_factor, - autovacuum_vacuum_insert_threshold: options.autovacuum_vacuum_insert_threshold, - autovacuum_vacuum_insert_scale_factor: options.autovacuum_vacuum_insert_scale_factor, - autovacuum_analyze_threshold: options.autovacuum_analyze_threshold, - autovacuum_analyze_scale_factor: options.autovacuum_analyze_scale_factor, - autovacuum_vacuum_cost_delay: options.autovacuum_vacuum_cost_delay, - autovacuum_vacuum_cost_limit: options.autovacuum_vacuum_cost_limit, - 
autovacuum_freeze_min_age: options.autovacuum_freeze_min_age, - autovacuum_freeze_max_age: options.autovacuum_freeze_max_age, - autovacuum_freeze_table_age: options.autovacuum_freeze_table_age, - autovacuum_multixact_freeze_min_age: options.autovacuum_multixact_freeze_min_age, - autovacuum_multixact_freeze_max_age: options.autovacuum_multixact_freeze_max_age, - autovacuum_multixact_freeze_table_age: options.autovacuum_multixact_freeze_table_age, - log_autovacuum_min_duration: options.log_autovacuum_min_duration, - }; - const user_catalog_table = options.user_catalog_table; - const toast_autovacuum_enabled = options['toast.autovacuum_enabled']; - const toast = { - toast_tuple_target: options.toast_tuple_target, - toast_vacuum_index_cleanup: options['toast.vacuum_index_cleanup'], - toast_vacuum_truncate: options['toast.vacuum_truncate'], - toast_autovacuum_vacuum_threshold: options['toast.autovacuum_vacuum_threshold'], - toast_autovacuum_vacuum_scale_factor: options['toast.autovacuum_vacuum_scale_factor'], - toast_autovacuum_vacuum_insert_threshold: options['toast.autovacuum_vacuum_insert_threshold'], - toast_autovacuum_vacuum_insert_scale_factor: options['toast.autovacuum_vacuum_insert_scale_factor'], - toast_autovacuum_vacuum_cost_delay: options['toast.autovacuum_vacuum_cost_delay'], - toast_autovacuum_vacuum_cost_limit: options['toast.autovacuum_vacuum_cost_limit'], - toast_autovacuum_freeze_min_age: options['toast.autovacuum_freeze_min_age'], - toast_autovacuum_freeze_max_age: options['toast.autovacuum_freeze_max_age'], - toast_autovacuum_freeze_table_age: options['toast.autovacuum_freeze_table_age'], - toast_autovacuum_multixact_freeze_min_age: options['toast.autovacuum_multixact_freeze_min_age'], - toast_autovacuum_multixact_freeze_max_age: options['toast.autovacuum_multixact_freeze_max_age'], - toast_autovacuum_multixact_freeze_table_age: options['toast.autovacuum_multixact_freeze_table_age'], - toast_log_autovacuum_min_duration: 
options['toast.log_autovacuum_min_duration'], - }; - - const storage_parameter = { - fillfactor, - parallel_workers, - autovacuum_enabled, - autovacuum: clearEmptyPropertiesInObject(autovacuum), - toast_autovacuum_enabled, - toast: clearEmptyPropertiesInObject(toast), - user_catalog_table, - }; - - return clearEmptyPropertiesInObject(storage_parameter); -}; - -const splitByEqualitySymbol = item => _.split(item, '='); - -const clearEmptyPropertiesInObject = obj => - _.chain(obj) - .toPairs() - .filter(([key, value]) => Boolean(value)) - .fromPairs() - .value(); +const getFirstRow = result => _.first(result.rows); diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 0d6826c..b578cd7 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -1,11 +1,39 @@ module.exports = { PING: 'SELECT schema_name FROM information_schema.schemata LIMIT 1;', GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;', - GET_TABLE_NAMES: 'SELECT * FROM information_schema.tables WHERE table_schema = $1 ORDER BY table_name;', + GET_TABLE_NAMES: ` + SELECT t.table_name, t.table_type, pc.relispartition AS is_table_partitioned + FROM information_schema.tables as t + INNER JOIN pg_class as pc + ON t.table_name = pc.relname + INNER JOIN pg_namespace AS pn + ON t.table_schema = pn.nspname + WHERE t.table_schema = $1 AND pn.nspname = $1 + ORDER BY t.table_name;`, GET_NAMESPACE_OID: 'SELECT oid FROM pg_namespace WHERE nspname = $1', - GET_TABLE_LEVEL_DATA: `SELECT pc.relpersistence, pc.reloptions, pt.spcname - FROM pg_class AS pc - LEFT JOIN pg_tablespace AS pt - ON pc.reltablespace = pt.oid - WHERE pc.relname = $1 AND pc.relnamespace = $2;`, + GET_TABLE_LEVEL_DATA: ` + SELECT pc.oid, pc.relpersistence, pc.reloptions, pt.spcname + FROM pg_class AS pc + LEFT JOIN pg_tablespace AS pt + ON pc.reltablespace = pt.oid + WHERE pc.relname = $1 AND pc.relnamespace = $2;`, + 
GET_TABLE_PARTITION_DATA: ` + SELECT partstrat as partition_type, + partattrs::int2[] as partition_attributes_positions, + pg_get_expr(partexprs, partrelid) AS expressions + FROM pg_partitioned_table + WHERE partrelid = $1;`, + GET_TABLE_ATTRIBUTES_WITH_POSITIONS: ` + SELECT attname as name, attnum as position + FROM pg_attribute + WHERE attrelid = $1 AND attnum > 0;`, + GET_TABLE_COLUMNS: ` + SELECT ic.*, pa.attndims FROM information_schema.columns AS ic + INNER JOIN pg_attribute AS pa + ON pa.attname = ic.column_name + WHERE ic.table_name = $1 AND table_schema = $2 AND pa.attrelid = $3 + ORDER BY ordinal_position;`, + GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`, + GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) FROM ${fullTableName};`, + GET_SAMPLED_DATA: fullTableName => `SELECT * FROM ${fullTableName} LIMIT $1;`, }; diff --git a/types/char.json b/types/char.json index 5ef17cd..f3ac3b4 100644 --- a/types/char.json +++ b/types/char.json @@ -13,11 +13,7 @@ "childRelationships": [], "foreignCollection": "", "foreignField": [], - "enum": [], - "sample":"", - "comments":"", "mode": "varchar", - "length": 10, - "subtype": "string" + "length": 10 } } \ No newline at end of file diff --git a/types/json.json b/types/json.json index 7262d19..de993e4 100644 --- a/types/json.json +++ b/types/json.json @@ -2,7 +2,7 @@ "name": "json", "erdAbbreviation": "", "dtdAbbreviation": "{...}", - "parentType": "document", + "parentType": "string", "default": true, "defaultValues": { "primaryKey": false, @@ -12,12 +12,9 @@ "foreignCollection": "", "foreignField": [], "enabled": true, - "properties": [], - "dependencies": [], - "minProperties": "", - "maxProperties": "", "additionalProperties": false, - "enum": [] + "mode": "json", + "subtype": "object" }, "subtypes": { "object": { From b400a2c51e1c971c9f0482de5f8221b9e6f8aeba Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 29 Sep 2021 11:20:00 +0300 Subject: [PATCH 07/69] RE: fixed table partitioning --- 
.../entity_level/entityLevelConfig.json | 46 ++++++++----------- .../helpers/postgresHelpers/tableHelper.js | 17 ++++--- reverse_engineering/helpers/queryConstants.js | 2 +- 3 files changed, 31 insertions(+), 34 deletions(-) diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index d5688f7..209fcdb 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -153,13 +153,14 @@ making sure that you maintain a proper JSON format. { "propertyName": "Partitioning", "propertyKeyword": "partitioning", - "propertyType": "block", + "propertyType": "group", + "groupItemLimit": 1, "propertyTooltip": "Determines how a partitioned table's rows are distributed across partitions", "structure": [ { - "propertyName": "Partition type", - "propertyKeyword": "partitionType", - "propertyTooltip": "Choose the desired partition type", + "propertyName": "Partition method", + "propertyKeyword": "partitionMethod", + "propertyTooltip": "Choose the desired partition method", "propertyType": "select", "defaultValue": "", "options": [ @@ -169,23 +170,25 @@ making sure that you maintain a proper JSON format. "HASH" ] }, + { + "propertyName": "Partition by", + "propertyKeyword": "partitionBy", + "propertyTooltip": "", + "propertyType": "select", + "defaultValue": "", + "options": [ + "keys", + "expression" + ] + }, { "propertyName": "Partition key", "propertyKeyword": "compositePartitionKey", "propertyType": "primaryKeySetter", "abbr": "pk,PK", "dependency": { - "type": "or", - "values": [ - { - "key": "partitioning_expression", - "value": false - }, - { - "key": "partitioning_expression", - "exists": false - } - ] + "key": "partitionBy", + "value": "keys" } }, { @@ -196,17 +199,8 @@ making sure that you maintain a proper JSON format. 
"template": "textarea", "markdown": false, "dependency": { - "type": "or", - "values": [ - { - "key": "compositePartitionKey", - "value": false - }, - { - "key": "compositePartitionKey", - "exists": false - } - ] + "key": "partitionBy", + "value": "expression" } } ] diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index cb3cdec..0d38ea9 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -74,7 +74,7 @@ const prepareTablePartition = (partitionResult, tableAttributesWithPositions) => return null; } - const partitionType = getPartitionType(partitionResult); + const partitionMethod = getPartitionMethod(partitionResult); const isExpression = _.some(partitionResult.partition_attributes_positions, position => position === 0); const key = isExpression ? 'partitioning_expression' : 'compositePartitionKey'; const value = isExpression @@ -84,14 +84,17 @@ const prepareTablePartition = (partitionResult, tableAttributesWithPositions) => getAttributeNameByPosition(tableAttributesWithPositions) ); - return { - partitionType, - [key]: value, - }; + return [ + { + partitionMethod, + partitionBy: isExpression ? 
'expression' : 'keys', + [key]: value, + }, + ]; }; -const getPartitionType = partitionResult => { - const type = partitionResult.partition_type; +const getPartitionMethod = partitionResult => { + const type = partitionResult.partition_method; switch (type) { case 'h': diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index b578cd7..c8b1383 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -18,7 +18,7 @@ module.exports = { ON pc.reltablespace = pt.oid WHERE pc.relname = $1 AND pc.relnamespace = $2;`, GET_TABLE_PARTITION_DATA: ` - SELECT partstrat as partition_type, + SELECT partstrat as partition_method, partattrs::int2[] as partition_attributes_positions, pg_get_expr(partexprs, partrelid) AS expressions FROM pg_partitioned_table From ec54c985f0d8da4f1e22a199320a1c6615272ba5 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 29 Sep 2021 11:54:39 +0300 Subject: [PATCH 08/69] RE: added handling of inherits property --- reverse_engineering/helpers/postgresService.js | 7 ++++++- reverse_engineering/helpers/queryConstants.js | 5 +++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 2c0a225..6b6a34e 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -100,6 +100,7 @@ module.exports = { const partitionResult = getFirstRow(await db.query(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid])); const tableAttributes = (await db.query(queryConstants.GET_TABLE_ATTRIBUTES_WITH_POSITIONS, [tableOid])).rows; const descriptionResult = await db.query(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid]); + const inheritsResult = getFirstRow(await db.query(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid])); const partitioning = prepareTablePartition(partitionResult, 
tableAttributes); @@ -107,7 +108,8 @@ module.exports = { const unlogged = rawTableData.relpersistence === 'u'; const storage_parameter = prepareStorageParameters(rawTableData.reloptions); const table_tablespace_name = result.spcname; - const description = getFirstRow(descriptionResult); + const description = getDescriptionFromResult(descriptionResult); + const inherits = inheritsResult?.parent_table_name; const tableData = { temporary, @@ -116,6 +118,7 @@ module.exports = { table_tablespace_name, partitioning, description, + inherits, }; const entityLevel = clearEmptyPropertiesInObject(tableData); @@ -165,3 +168,5 @@ const isSystemSchema = schema_name => { }; const getFirstRow = result => _.first(result.rows); + +const getDescriptionFromResult = result => getFirstRow(result)?.obj_description; diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index c8b1383..ca8ce50 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -36,4 +36,9 @@ module.exports = { GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`, GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) FROM ${fullTableName};`, GET_SAMPLED_DATA: fullTableName => `SELECT * FROM ${fullTableName} LIMIT $1;`, + GET_INHERITS_PARENT_TABLE_NAME: ` + SELECT pc.relname AS parent_table_name FROM pg_inherits AS pi + INNER JOIN pg_class AS pc + ON pc.oid = pi.inhparent + WHERE pi.inhrelid = $1;`, }; From b4fb4e2ddc7288b6fbdd13a8737b70212f7fdc4b Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 29 Sep 2021 14:42:44 +0300 Subject: [PATCH 09/69] RE: added constraints support --- .../entity_level/entityLevelConfig.json | 76 ++++++++++--------- .../helpers/postgresHelpers/tableHelper.js | 67 ++++++++++++++++ .../helpers/postgresService.js | 4 + reverse_engineering/helpers/queryConstants.js | 15 ++++ 4 files changed, 126 insertions(+), 36 deletions(-) diff --git 
a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 209fcdb..716aa72 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -662,27 +662,26 @@ making sure that you maintain a proper JSON format. "propertyName": "Key", "propertyKeyword": "compositePrimaryKey", "propertyType": "primaryKeySetter", - "abbr": "pk", - "attributeList": [ - "ascending", - "descending" - ] + "abbr": "pk" + }, + { + "propertyName": "Include non-key columns", + "propertyKeyword": "include", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", + "propertyName": "With storage parameters", + "propertyKeyword": "indexStorageParameters", + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Index tablespace", + "propertyKeyword": "indexTablespace", "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] + "propertyType": "text" }, { "propertyName": "Comment", @@ -710,27 +709,26 @@ making sure that you maintain a proper JSON format. "propertyName": "Key", "propertyKeyword": "compositeUniqueKey", "propertyType": "primaryKeySetter", - "abbr": " ", - "attributeList": [ - "ascending", - "descending" - ] + "abbr": " " }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", + "propertyName": "Include non-key columns", + "propertyKeyword": "include", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
+ }, + { + "propertyName": "With storage parameters", + "propertyKeyword": "indexStorageParameters", + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Index tablespace", + "propertyKeyword": "indexTablespace", "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] + "propertyType": "text" }, { "propertyName": "Comment", @@ -1057,6 +1055,12 @@ making sure that you maintain a proper JSON format. "template": "textarea", "markdown": false }, + { + "propertyName": "No inherit", + "propertyKeyword": "noInherit", + "propertyTooltip": "No inherit", + "propertyType": "checkbox" + }, { "propertyName": "Comments", "propertyKeyword": "constrComments", diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 0d38ea9..59b4a28 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -144,10 +144,77 @@ const getLimit = (count, recordSamplingSettings) => { return size; }; +const prepareTableConstraints = (constraintsResult, attributesWithPositions) => { + return _.reduce( + constraintsResult, + (entityConstraints, constraint) => { + switch (constraint.constraint_type) { + case 'c': + return { + ...entityConstraints, + chkConstr: [...entityConstraints.chkConstr, getCheckConstraint(constraint)], + }; + case 'p': + return { + ...entityConstraints, + primaryKey: [ + ...entityConstraints.primaryKey, + getPrimaryKeyConstraint(constraint, attributesWithPositions), + ], + }; + case 'u': + return { + ...entityConstraints, + uniqueKey: [ + ...entityConstraints.uniqueKey, + getUniqueKeyConstraint(constraint, attributesWithPositions), + ], + }; + default: + return entityConstraints; + } + }, + { + chkConstr: [], + uniqueKey: [], + primaryKey: [], + } + ); +}; + +const getPrimaryKeyConstraint = 
(constraint, attributesWithPositions) => { + return { + constraintName: constraint.constraint_name, + compositePrimaryKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(attributesWithPositions)), + indexStorageParameters: _.join(constraint.storage_parameters, ','), + indexTablespace: constraint.tablespace, + }; +}; + +const getUniqueKeyConstraint = (constraint, attributesWithPositions) => { + return { + constraintName: constraint.constraint_name, + compositeUniqueKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(attributesWithPositions)), + indexStorageParameters: _.join(constraint.storage_parameters, ','), + indexTablespace: constraint.tablespace, + indexComment: constraint.description, + }; +}; + +const getCheckConstraint = constraint => { + return { + chkConstrName: constraint.constraint_name, + constrExpression: constraint.expression, + noInherit: constraint.no_inherit, + constrDescription: constraint.description, + }; +}; + module.exports = { prepareStorageParameters, prepareTablePartition, setDependencies, checkHaveJsonTypes, + prepareTableConstraints, getLimit, }; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 6b6a34e..f46f8c0 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -15,6 +15,7 @@ const { prepareStorageParameters, prepareTablePartition, checkHaveJsonTypes, + prepareTableConstraints, getLimit, } = require('./postgresHelpers/tableHelper'); const queryConstants = require('./queryConstants'); @@ -101,6 +102,7 @@ module.exports = { const tableAttributes = (await db.query(queryConstants.GET_TABLE_ATTRIBUTES_WITH_POSITIONS, [tableOid])).rows; const descriptionResult = await db.query(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid]); const inheritsResult = getFirstRow(await db.query(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid])); + const tableConstraintsResult = (await 
db.query(queryConstants.GET_TABLE_CONSTRAINTS, [tableOid])).rows; const partitioning = prepareTablePartition(partitionResult, tableAttributes); @@ -110,6 +112,7 @@ module.exports = { const table_tablespace_name = result.spcname; const description = getDescriptionFromResult(descriptionResult); const inherits = inheritsResult?.parent_table_name; + const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableAttributes); const tableData = { temporary, @@ -119,6 +122,7 @@ module.exports = { partitioning, description, inherits, + ...tableConstraint, }; const entityLevel = clearEmptyPropertiesInObject(tableData); diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index ca8ce50..37b2017 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -41,4 +41,19 @@ module.exports = { INNER JOIN pg_class AS pc ON pc.oid = pi.inhparent WHERE pi.inhrelid = $1;`, + GET_TABLE_CONSTRAINTS: ` + SELECT pcon.conname AS constraint_name, + pcon.contype AS constraint_type, + pcon.connoinherit AS no_inherit, + pcon.conkey AS constraint_keys, + pg_get_expr(pcon.conbin, pcon.conrelid) AS expression, + obj_description(pcon.oid) AS description, + pc.reloptions AS storage_parameters, + pt.spcname AS tablespace + FROM pg_constraint AS pcon + LEFT JOIN pg_class AS pc + ON pcon.conindid = pc.oid + LEFT JOIN pg_tablespace AS pt + ON pc.reltablespace = pt.oid + WHERE pcon.conrelid = $1;`, }; From a506d8dbc3c0301ae420290cad0cf30e00753fe1 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Thu, 30 Sep 2021 14:56:37 +0300 Subject: [PATCH 10/69] Changed configuration for indexes storage parameters --- .../entity_level/entityLevelConfig.json | 171 ++++++++++-------- 1 file changed, 95 insertions(+), 76 deletions(-) diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 716aa72..0d77de2 100644 --- 
a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -709,7 +709,7 @@ making sure that you maintain a proper JSON format. "propertyName": "Key", "propertyKeyword": "compositeUniqueKey", "propertyType": "primaryKeySetter", - "abbr": " " + "abbr": "uk" }, { "propertyName": "Include non-key columns", @@ -830,7 +830,7 @@ making sure that you maintain a proper JSON format. "DESC" ] }, - "mullsOrder": { + "nullsOrder": { "propertyType": "select", "options": [ "", @@ -890,32 +890,87 @@ making sure that you maintain a proper JSON format. "propertyKeyword": "deduplicate_items", "propertyType": "checkbox", "propertyTooltip": "Declare the table as an additional catalog table for purposes of logical replication.", - "defaultValue": true, - "dependency": { - "type": "or", - "values": [ - { - "key": "index_method", - "value": "btree" - } - ] - } + "defaultValue": true + } + ], + "dependency": { + "key": "index_method", + "value": "btree" + } + }, + { + "propertyName": "With storage parameters", + "propertyKeyword": "index_storage_parameter", + "propertyType": "block", + "propertyTooltip": "The optional WITH clause specifies storage parameters for the index. Each index method has its own set of allowed storage parameters.", + "structure": [ + { + "propertyName": "Fill factor", + "propertyKeyword": "index_fillfactor", + "propertyType": "numeric", + "valueType": "number", + "propertyTooltip": "A percentage between 10 and 100. The fillfactor for an index is a percentage that determines how full the index method will try to pack index pages.", + "minValue": 10, + "maxValue": 100, + "step": 1, + "defaultValue": 100 + } + ], + "dependency": { + "key": "index_method", + "value": [ + "hash", + "spgist" + ] + } + }, + { + "propertyName": "With storage parameters", + "propertyKeyword": "index_storage_parameter", + "propertyType": "block", + "propertyTooltip": "The optional WITH clause specifies storage parameters for the index. 
Each index method has its own set of allowed storage parameters.", + "structure": [ + { + "propertyName": "Fill factor", + "propertyKeyword": "index_fillfactor", + "propertyType": "numeric", + "valueType": "number", + "propertyTooltip": "A percentage between 10 and 100. The fillfactor for an index is a percentage that determines how full the index method will try to pack index pages.", + "minValue": 10, + "maxValue": 100, + "step": 1, + "defaultValue": 100 }, + { + "propertyName": "Buffering", + "propertyKeyword": "index_buffering", + "propertyType": "select", + "propertyTooltip": "Determines whether the buffering build technique is used to build the index.", + "options": [ + "AUTO", + "OFF", + "ON" + ], + "defaultValue": "AUTO" + } + ], + "dependency": { + "key": "index_method", + "value": "gist" + } + }, + { + "propertyName": "With storage parameters", + "propertyKeyword": "index_storage_parameter", + "propertyType": "block", + "propertyTooltip": "The optional WITH clause specifies storage parameters for the index. Each index method has its own set of allowed storage parameters.", + "structure": [ { "propertyName": "Fast update", "propertyKeyword": "fastupdate", "propertyType": "checkbox", "propertyTooltip": "This setting controls usage of the fast update technique.", - "defaultValue": true, - "dependency": { - "type": "or", - "values": [ - { - "key": "index_method", - "value": "gin" - } - ] - } + "defaultValue": true }, { "propertyName": "Gin pending list limit", @@ -925,21 +980,20 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "This value is specified in kilobytes. 
Sets the maximum size of a GIN index's pending list, which is used when fastupdate is enabled.", "minValue": 0, "step": 1, - "defaultValue": 4000, - "dependency": { - "type": "and", - "values": [ - { - "key": "index_method", - "value": "gin" - }, - { - "key": "fastupdate", - "value": true - } - ] - } - }, + "defaultValue": 4000 + } + ], + "dependency": { + "key": "index_method", + "value": "gin" + } + }, + { + "propertyName": "With storage parameters", + "propertyKeyword": "index_storage_parameter", + "propertyType": "block", + "propertyTooltip": "The optional WITH clause specifies storage parameters for the index. Each index method has its own set of allowed storage parameters.", + "structure": [ { "propertyName": "Pages per range", "propertyKeyword": "pages_per_range", @@ -948,54 +1002,19 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Defines the number of table blocks that make up one block range for each entry of a BRIN index.", "minValue": 0, "step": 1, - "defaultValue": 128, - "dependency": { - "type": "or", - "values": [ - { - "key": "index_method", - "value": "brin" - } - ] - } + "defaultValue": 128 }, { "propertyName": "Auto summarize", "propertyKeyword": "autosummarize ", "propertyType": "checkbox", "propertyTooltip": "Defines whether a summarization run is invoked for the previous page range whenever an insertion is detected on the next one.", - "defaultValue": true, - "dependency": { - "type": "or", - "values": [ - { - "key": "index_method", - "value": "brin" - } - ] - } + "defaultValue": true } ], "dependency": { - "type": "or", - "values": [ - { - "key": "index_method", - "value": "btree" - }, - { - "key": "index_method", - "value": "hash" - }, - { - "key": "index_method", - "value": "gist" - }, - { - "key": "index_method", - "value": "spgist" - } - ] + "key": "index_method", + "value": "brin" } }, { From 4cc575d7e193eec4ec3d52dd391f31603c95fd60 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Thu, 30 Sep 2021 14:57:32 
+0300 Subject: [PATCH 11/69] RE: added query tolerancy for not important data, added RE of table indexes --- reverse_engineering/api.js | 9 +- reverse_engineering/helpers/db.js | 27 ++++- .../helpers/postgresHelpers/columnHelper.js | 4 +- .../helpers/postgresHelpers/tableHelper.js | 101 ++++++++++++++-- .../helpers/postgresService.js | 86 +++++++------- reverse_engineering/helpers/queryConstants.js | 108 +++++++++++++++--- 6 files changed, 257 insertions(+), 78 deletions(-) diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 213299c..6295dd9 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -143,7 +143,8 @@ const createLogger = ({ title, logger, hiddenKeys }) => { }; }; -const prepareError = error => ({ - message: error.message, - stack: error.stack, -}); +const prepareError = error => { + error = JSON.stringify(error, Object.getOwnPropertyNames(error)); + error = JSON.parse(error); + return error; +}; diff --git a/reverse_engineering/helpers/db.js b/reverse_engineering/helpers/db.js index 36627a1..d2f4744 100644 --- a/reverse_engineering/helpers/db.js +++ b/reverse_engineering/helpers/db.js @@ -1,3 +1,5 @@ +const queryConstants = require('./queryConstants'); + let pool = null; let logger = null; @@ -16,15 +18,32 @@ module.exports = { } }, - async query(query, params) { - logger.info('Execute query', { query, params }); + async query(query, params, firstRow = false) { + const queryName = queryConstants.getQueryName(query); + + logger.info('Execute query', { queryName, params }); const start = Date.now(); const result = await pool.query(query, params); const duration = Date.now() - start; - logger.info('Query executed', { query, params, duration, rowsCount: result.rowCount }); + logger.info('Query executed', { queryName, params, duration, rowsCount: result.rowCount }); + + const rows = result.rows || []; + + return firstRow ? 
rows[0] : rows; + }, + + async queryTolerant(query, params, firstRow = false) { + try { + return await this.query(query, params, firstRow); + } catch (error) { + error.query = query; + error.params = params; - return result; + logger.error(error); + + return null; + } }, }; diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index 5983799..01cf08f 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -20,7 +20,7 @@ const columnPropertiesMapper = { interval_type: 'intervalOptions', collation_name: 'collationRule', column_name: 'name', - attndims: 'numberOfArrayDimensions', + number_of_array_dimensions: 'numberOfArrayDimensions', udt_name: 'udt_name', character_maximum_length: 'length', }; @@ -148,7 +148,7 @@ const mapType = type => { case 'regtype': return { type: 'oid', mode: type }; default: - return { type }; + return { type: 'char', mode: 'varchar' }; } }; diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 59b4a28..234d51b 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -69,7 +69,7 @@ const prepareStorageParameters = reloptions => { return clearEmptyPropertiesInObject(storage_parameter); }; -const prepareTablePartition = (partitionResult, tableAttributesWithPositions) => { +const prepareTablePartition = (partitionResult, tableColumns) => { if (!partitionResult) { return null; } @@ -78,10 +78,10 @@ const prepareTablePartition = (partitionResult, tableAttributesWithPositions) => const isExpression = _.some(partitionResult.partition_attributes_positions, position => position === 0); const key = isExpression ? 'partitioning_expression' : 'compositePartitionKey'; const value = isExpression - ? 
getPartitionExpression(partitionResult, tableAttributesWithPositions) + ? getPartitionExpression(partitionResult, tableColumns) : _.map( partitionResult.partition_attributes_positions, - getAttributeNameByPosition(tableAttributesWithPositions) + getAttributeNameByPosition(tableColumns) ); return [ @@ -108,7 +108,7 @@ const getPartitionMethod = partitionResult => { } }; -const getPartitionExpression = (partitionResult, tableAttributesWithPositions) => { +const getPartitionExpression = (partitionResult, tableColumns) => { let expressionIndex = 0; const expressions = _.split(partitionResult.expressions, ','); @@ -121,13 +121,13 @@ const getPartitionExpression = (partitionResult, tableAttributesWithPositions) = return expression; } - return getAttributeNameByPosition(tableAttributesWithPositions)(attributePosition); + return getAttributeNameByPosition(tableColumns)(attributePosition); }) .join(',') .value(); }; -const getAttributeNameByPosition = attributes => position => _.find(attributes, { position })?.name; +const getAttributeNameByPosition = attributes => position => _.find(attributes, { ordinal_position: position })?.column_name; const splitByEqualitySymbol = item => _.split(item, '='); @@ -182,19 +182,19 @@ const prepareTableConstraints = (constraintsResult, attributesWithPositions) => ); }; -const getPrimaryKeyConstraint = (constraint, attributesWithPositions) => { +const getPrimaryKeyConstraint = (constraint, tableColumns) => { return { constraintName: constraint.constraint_name, - compositePrimaryKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(attributesWithPositions)), + compositePrimaryKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(tableColumns)), indexStorageParameters: _.join(constraint.storage_parameters, ','), indexTablespace: constraint.tablespace, }; }; -const getUniqueKeyConstraint = (constraint, attributesWithPositions) => { +const getUniqueKeyConstraint = (constraint, tableColumns) => { return { constraintName: 
constraint.constraint_name, - compositeUniqueKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(attributesWithPositions)), + compositeUniqueKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(tableColumns)), indexStorageParameters: _.join(constraint.storage_parameters, ','), indexTablespace: constraint.tablespace, indexComment: constraint.description, @@ -210,11 +210,92 @@ const getCheckConstraint = constraint => { }; }; +const prepareTableIndexes = (tableIndexesResult) => { + return _.map(tableIndexesResult, indexData => { + const index = { + indxName: indexData.indexname, + index_method: indexData.index_method, + unique: indexData.index_unique ?? false, + columns: mapIndexColumns(indexData), + index_tablespace_name: indexData.tablespace_name, + index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters) + }; + + return clearEmptyPropertiesInObject(index) + }); +}; + +const mapIndexColumns = indexData => { + return _.chain(indexData.columns) + .map((columnName, itemIndex) => { + if (!columnName) { + return; + } + + const sortOrder = _.get(indexData, `ascending.${itemIndex}`, false) ? 'ASC' : 'DESC'; + const nullsOrder = getNullsOrder(_.get(indexData, `nulls_first.${itemIndex}`)); + const opclass = _.get(indexData, `opclasses.${itemIndex}`); + + return { + name: columnName, + sortOrder, + nullsOrder, + opclass, + }; + }) + .compact() + .value(); +}; + +const getNullsOrder = nulls_first => { + if (_.isNil(nulls_first)) { + return ''; + } + + return nulls_first ? 
'NULLS FIRST' : 'NULLS LAST'; +}; + +const getIndexStorageParameters = (storageParameters) => { + if(!storageParameters) { + return null + } + + const params = _.fromPairs(_.map(storageParameters, param => splitByEqualitySymbol(param))); + + const data = { + index_fillfactor: params.fillfactor, + deduplicate_items: params.deduplicate_items, + index_buffering: params.index_buffering, + fastupdate: params.fastupdate, + gin_pending_list_limit: params.gin_pending_list_limit, + pages_per_range: params.pages_per_range, + autosummarize: params.autosummarize + } + + return clearEmptyPropertiesInObject(data) +} + +const prepareTableLevelData = (tableLevelData) => { + const temporary = tableLevelData?.relpersistence === 't'; + const unlogged = tableLevelData?.relpersistence === 'u'; + const storage_parameter = prepareStorageParameters(tableLevelData?.reloptions); + const table_tablespace_name = tableLevelData?.spcname; + + return { + temporary, + unlogged, + storage_parameter, + table_tablespace_name, + } +} + module.exports = { prepareStorageParameters, prepareTablePartition, setDependencies, checkHaveJsonTypes, prepareTableConstraints, + prepareTableLevelData, + prepareTableIndexes, getLimit, }; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index f46f8c0..6b9b45b 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -12,11 +12,12 @@ const { } = require('./postgresHelpers/common'); const { setDependencies: setDependenciesInTableHelper, - prepareStorageParameters, prepareTablePartition, checkHaveJsonTypes, prepareTableConstraints, getLimit, + prepareTableLevelData, + prepareTableIndexes, } = require('./postgresHelpers/tableHelper'); const queryConstants = require('./queryConstants'); @@ -56,21 +57,18 @@ module.exports = { }, async getAllSchemasNames() { - const result = await db.query(queryConstants.GET_SCHEMA_NAMES); + const schemaNames = await 
db.query(queryConstants.GET_SCHEMA_NAMES); - return result.rows.map(({ schema_name }) => schema_name).filter(schemaName => !isSystemSchema(schemaName)); + return schemaNames.map(({ schema_name }) => schema_name).filter(schemaName => !isSystemSchema(schemaName)); }, async getTablesNames(schemaName) { - const result = await db.query(queryConstants.GET_TABLE_NAMES, [schemaName]); + const tables = await db.query(queryConstants.GET_TABLE_NAMES, [schemaName]); const tableTypesToExclude = ['FOREIGN TABLE']; - return result.rows - .filter( - ({ table_type, is_table_partitioned }) => - !_.includes(tableTypesToExclude, table_type) && !is_table_partitioned - ) + return tables + .filter(({ table_type }) => !_.includes(tableTypesToExclude, table_type)) .map(({ table_name, table_type }) => { if (isViewByTableType(table_type)) { return `${table_name}${VIEW_SUFFIX}`; @@ -81,8 +79,8 @@ module.exports = { }, async retrieveEntitiesData(schemaName, entitiesNames, recordSamplingSettings) { - const schemaOidResult = await db.query(queryConstants.GET_NAMESPACE_OID, [schemaName]); - const schemaOid = getFirstRow(schemaOidResult).oid; + const schemaOidResult = await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true); + const schemaOid = schemaOidResult.oid; const [viewsNames, tablesNames] = _.partition(entitiesNames, isViewByName); @@ -95,40 +93,39 @@ module.exports = { }, async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, tableName) { - const result = await db.query(queryConstants.GET_TABLE_LEVEL_DATA, [tableName, schemaOid]); - const rawTableData = getFirstRow(result); - const tableOid = rawTableData.oid; - const partitionResult = getFirstRow(await db.query(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid])); - const tableAttributes = (await db.query(queryConstants.GET_TABLE_ATTRIBUTES_WITH_POSITIONS, [tableOid])).rows; - const descriptionResult = await db.query(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid]); - const inheritsResult = 
getFirstRow(await db.query(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid])); - const tableConstraintsResult = (await db.query(queryConstants.GET_TABLE_CONSTRAINTS, [tableOid])).rows; - - const partitioning = prepareTablePartition(partitionResult, tableAttributes); - - const temporary = rawTableData.relpersistence === 't'; - const unlogged = rawTableData.relpersistence === 'u'; - const storage_parameter = prepareStorageParameters(rawTableData.reloptions); - const table_tablespace_name = result.spcname; + const tableLevelData = await db.queryTolerant( + queryConstants.GET_TABLE_LEVEL_DATA, + [tableName, schemaOid], + true + ); + const tableOid = tableLevelData?.oid; + + const partitionResult = await db.queryTolerant(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid], true); + const tableColumns = await this._getTableColumns(tableName, schemaName, tableOid); + const descriptionResult = await db.queryTolerant(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid], true); + const inheritsResult = await db.queryTolerant(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid], true); + const tableConstraintsResult = await db.queryTolerant(queryConstants.GET_TABLE_CONSTRAINTS, [tableOid]); + const tableIndexesResult = await db.queryTolerant(queryConstants.GET_TABLE_INDEXES, [tableOid]); + + const partitioning = prepareTablePartition(partitionResult, tableColumns); + const tableLevelProperties = prepareTableLevelData(tableLevelData); const description = getDescriptionFromResult(descriptionResult); const inherits = inheritsResult?.parent_table_name; - const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableAttributes); + const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableColumns); + const tableIndexes = prepareTableIndexes(tableIndexesResult); const tableData = { - temporary, - unlogged, - storage_parameter, - table_tablespace_name, partitioning, description, inherits, + Indxs: tableIndexes, + ...tableLevelProperties, 
...tableConstraint, }; const entityLevel = clearEmptyPropertiesInObject(tableData); - const columns = await db.query(queryConstants.GET_TABLE_COLUMNS, [tableName, schemaName, tableOid]); - let targetAttributes = columns.rows.map(mapColumnData); + let targetAttributes = tableColumns.map(mapColumnData); const hasJsonTypes = checkHaveJsonTypes(targetAttributes); let documents = []; @@ -146,13 +143,26 @@ module.exports = { }; }, + async _getTableColumns(tableName, schemaName, tableOid) { + const tableColumns = await db.query(queryConstants.GET_TABLE_COLUMNS, [tableName, schemaName]); + const tableColumnsAdditionalData = await db.queryTolerant(queryConstants.GET_TABLE_COLUMNS_ADDITIONAL_DATA, [ + tableOid, + ]); + + return _.map(tableColumns, (columnData, index) => { + return { + ...columnData, + ...(_.find(tableColumnsAdditionalData, { name: columnData.column_name }) || {}), + }; + }); + }, + async _getDocuments(schemaName, tableName, recordSamplingSettings) { const fullTableName = `${schemaName}.${tableName}`; - const quantity = await db.query(queryConstants.GET_ROWS_COUNT(fullTableName)); + const quantity = (await db.queryTolerant(queryConstants.GET_ROWS_COUNT(fullTableName), [], true))?.quantity || 0; const limit = getLimit(quantity, recordSamplingSettings); - const sampledDocs = await db.query(queryConstants.GET_SAMPLED_DATA(fullTableName), [limit]); - return sampledDocs.rows; + return await db.queryTolerant(queryConstants.GET_SAMPLED_DATA(fullTableName), [limit]); }, }; @@ -171,6 +181,4 @@ const isSystemSchema = schema_name => { return false; }; -const getFirstRow = result => _.first(result.rows); - -const getDescriptionFromResult = result => getFirstRow(result)?.obj_description; +const getDescriptionFromResult = result => result?.obj_description; diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 37b2017..6b16e7e 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ 
b/reverse_engineering/helpers/queryConstants.js @@ -1,15 +1,11 @@ -module.exports = { +const queryConstants = { PING: 'SELECT schema_name FROM information_schema.schemata LIMIT 1;', GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;', GET_TABLE_NAMES: ` - SELECT t.table_name, t.table_type, pc.relispartition AS is_table_partitioned - FROM information_schema.tables as t - INNER JOIN pg_class as pc - ON t.table_name = pc.relname - INNER JOIN pg_namespace AS pn - ON t.table_schema = pn.nspname - WHERE t.table_schema = $1 AND pn.nspname = $1 - ORDER BY t.table_name;`, + SELECT table_name, table_type + FROM information_schema.tables + WHERE table_schema = $1 + ORDER BY table_name;`, GET_NAMESPACE_OID: 'SELECT oid FROM pg_namespace WHERE nspname = $1', GET_TABLE_LEVEL_DATA: ` SELECT pc.oid, pc.relpersistence, pc.reloptions, pt.spcname @@ -23,18 +19,16 @@ module.exports = { pg_get_expr(partexprs, partrelid) AS expressions FROM pg_partitioned_table WHERE partrelid = $1;`, - GET_TABLE_ATTRIBUTES_WITH_POSITIONS: ` - SELECT attname as name, attnum as position - FROM pg_attribute - WHERE attrelid = $1 AND attnum > 0;`, GET_TABLE_COLUMNS: ` - SELECT ic.*, pa.attndims FROM information_schema.columns AS ic - INNER JOIN pg_attribute AS pa - ON pa.attname = ic.column_name - WHERE ic.table_name = $1 AND table_schema = $2 AND pa.attrelid = $3 - ORDER BY ordinal_position;`, + SELECT * FROM information_schema.columns + WHERE table_name = $1 AND table_schema = $2 + ORDER BY ordinal_position`, + GET_TABLE_COLUMNS_ADDITIONAL_DATA: ` + SELECT attname AS name, attndims AS number_of_array_dimensions + FROM pg_attribute + WHERE attrelid = $1;`, GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`, - GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) FROM ${fullTableName};`, + GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) AS quantity FROM ${fullTableName};`, GET_SAMPLED_DATA: fullTableName => `SELECT * FROM ${fullTableName} LIMIT $1;`, GET_INHERITS_PARENT_TABLE_NAME: ` 
SELECT pc.relname AS parent_table_name FROM pg_inherits AS pi @@ -56,4 +50,80 @@ module.exports = { LEFT JOIN pg_tablespace AS pt ON pc.reltablespace = pt.oid WHERE pcon.conrelid = $1;`, + GET_TABLE_INDEXES: ` + SELECT indexname, + index_method, + index_unique, + array_agg(attname + ORDER BY ord)::text[] AS columns, + array_agg(coll + ORDER BY ord) AS collations, + array_agg(opclass + ORDER BY ord) AS opclasses, + array_agg(expression + ORDER BY ord) AS expressions, + array_agg(ascending + ORDER BY ord) AS ascendings, + array_agg(nulls_first + ORDER BY ord) AS nulls_first, + reloptions AS storage_parameters, + tablespace_name + FROM + (SELECT ct.oid AS table_oid, + c.relname AS indexname, + m.amname AS index_method, + indexes.indisunique AS index_unique, + indexes.ord, + attribute.attname, + c.reloptions, + tablespace_t.spcname AS tablespace_name, + CASE + WHEN collation_namespace.nspname is not null THEN format('%I.%I',collation_namespace.nspname,collation_t.collname) + END AS coll, + CASE + WHEN opclass_t.opcname is not null THEN format('%I.%I',opclas_namespace.nspname,opclass_t.opcname) + END AS opclass, + CASE + WHEN indexes.ord > 0 THEN pg_index_column_has_property(indexes.indexrelid, indexes.key, 'asc') + END AS ascending, + CASE + WHEN indexes.ord > 0 THEN pg_index_column_has_property(indexes.indexrelid, indexes.key, 'nulls_first') + END AS nulls_first, + pg_get_indexdef(indexes.indexrelid, ord, false) AS expression + FROM + (SELECT *, + generate_series(1,array_length(i.indkey,1)) AS ord, + unnest(i.indkey) AS key, + unnest(i.indcollation) AS coll, + unnest(i.indclass) AS class, + unnest(i.indoption) AS option + FROM pg_index i) indexes + JOIN pg_class c ON (c.oid=indexes.indexrelid) + JOIN pg_class ct ON (ct.oid=indexes.indrelid) + JOIN pg_am m ON (m.oid=c.relam) + LEFT JOIN pg_attribute attribute ON (attribute.attrelid=indexes.indrelid + AND attribute.attnum=indexes.key) + LEFT JOIN pg_collation collation_t ON (collation_t.oid=indexes.coll) + LEFT JOIN 
pg_namespace collation_namespace ON (collation_namespace.oid=collation_t.collnamespace) + LEFT JOIN pg_opclass opclass_t ON (opclass_t.oid=indexes.class) + LEFT JOIN pg_namespace opclas_namespace ON (opclas_namespace.oid=opclass_t.opcnamespace) + LEFT JOIN pg_tablespace tablespace_t ON (tablespace_t.oid = c.reltablespace)) s2 + WHERE table_oid = $1 + GROUP BY indexname, + index_method, + index_unique, + reloptions, + tablespace_name;`, +}; + +const getQueryName = query => { + const queryEntry = + Object.entries(queryConstants).find(([queryName, constantQuery]) => query === constantQuery) || []; + + return queryEntry[0]; +}; + +module.exports = { + getQueryName, + ...queryConstants, }; From b084d4d1111f4012f62d4c3f4fee46a8f38a454d Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Thu, 30 Sep 2021 16:03:40 +0300 Subject: [PATCH 12/69] RE: addeed reversion of relationships --- reverse_engineering/api.js | 12 ++++++--- .../helpers/postgresHelpers/common.js | 4 +++ .../postgresHelpers/foreignKeysHelper.js | 27 +++++++++++++++++++ .../helpers/postgresHelpers/tableHelper.js | 11 ++++---- .../helpers/postgresService.js | 11 +++++++- reverse_engineering/helpers/queryConstants.js | 15 +++++++++++ 6 files changed, 70 insertions(+), 10 deletions(-) create mode 100644 reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 6295dd9..187d523 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -92,7 +92,7 @@ module.exports = { const collections = data.collectionData.collections; const schemasNames = data.collectionData.dataBaseNames; - const packages = await Promise.all( + const { packages, relationships } = await Promise.all( schemasNames.map(async schemaName => ({ schemaName, entities: await postgresService.retrieveEntitiesData( @@ -102,7 +102,11 @@ module.exports = { ), })) ).then(tablesDataPerSchema => { - return tablesDataPerSchema.flatMap(({ schemaName, entities }) 
=> + const relationships = tablesDataPerSchema + .flatMap(({ entities }) => entities.map(entityData => entityData.relationships)) + .flat(); + + const packages = tablesDataPerSchema.flatMap(({ schemaName, entities }) => entities.map(entityData => ({ dbName: schemaName, collectionName: entityData.name, @@ -115,9 +119,11 @@ module.exports = { }, })) ); + + return { packages, relationships }; }); - callback(null, packages); + callback(null, packages, null, relationships); } catch (error) { logger.log('error', prepareError(error), 'Retrieve tables data'); callback(prepareError(error)); diff --git a/reverse_engineering/helpers/postgresHelpers/common.js b/reverse_engineering/helpers/postgresHelpers/common.js index 9ae07c5..374fc32 100644 --- a/reverse_engineering/helpers/postgresHelpers/common.js +++ b/reverse_engineering/helpers/postgresHelpers/common.js @@ -11,7 +11,11 @@ const clearEmptyPropertiesInObject = obj => .fromPairs() .value(); +const getColumnNameByPosition = columns => position => + _.find(columns, { ordinal_position: position })?.column_name; + module.exports = { clearEmptyPropertiesInObject, setDependencies, + getColumnNameByPosition }; diff --git a/reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js b/reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js new file mode 100644 index 0000000..20e7282 --- /dev/null +++ b/reverse_engineering/helpers/postgresHelpers/foreignKeysHelper.js @@ -0,0 +1,27 @@ +const { getColumnNameByPosition } = require('./common'); + +let _ = null; + +const setDependencies = app => { + _ = app.require('lodash'); +}; + +const prepareForeignKeys = (tableForeignKeys, tableName, schemaName, columns) => { + return _.map(tableForeignKeys, foreignKeyData => { + return { + relationshipName: foreignKeyData.relationship_name, + dbName: foreignKeyData.foreign_table_schema, + parentCollection: foreignKeyData.foreign_table_name, + parentField: foreignKeyData.foreign_columns, + childDbName: schemaName, + childCollection: 
tableName, + childField: _.map(foreignKeyData.table_columns_positions, getColumnNameByPosition(columns)), + relationshipType: 'Foreign Key', + }; + }); +}; + +module.exports = { + setDependencies, + prepareForeignKeys, +}; diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 234d51b..f4b633f 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -1,4 +1,4 @@ -const { clearEmptyPropertiesInObject } = require('./common'); +const { clearEmptyPropertiesInObject, getColumnNameByPosition } = require('./common'); let _ = null; @@ -81,7 +81,7 @@ const prepareTablePartition = (partitionResult, tableColumns) => { ? getPartitionExpression(partitionResult, tableColumns) : _.map( partitionResult.partition_attributes_positions, - getAttributeNameByPosition(tableColumns) + getColumnNameByPosition(tableColumns) ); return [ @@ -121,13 +121,12 @@ const getPartitionExpression = (partitionResult, tableColumns) => { return expression; } - return getAttributeNameByPosition(tableColumns)(attributePosition); + return getColumnNameByPosition(tableColumns)(attributePosition); }) .join(',') .value(); }; -const getAttributeNameByPosition = attributes => position => _.find(attributes, { ordinal_position: position })?.column_name; const splitByEqualitySymbol = item => _.split(item, '='); @@ -185,7 +184,7 @@ const prepareTableConstraints = (constraintsResult, attributesWithPositions) => const getPrimaryKeyConstraint = (constraint, tableColumns) => { return { constraintName: constraint.constraint_name, - compositePrimaryKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(tableColumns)), + compositePrimaryKey: _.map(constraint.constraint_keys, getColumnNameByPosition(tableColumns)), indexStorageParameters: _.join(constraint.storage_parameters, ','), indexTablespace: constraint.tablespace, }; @@ -194,7 +193,7 @@ 
const getPrimaryKeyConstraint = (constraint, tableColumns) => { const getUniqueKeyConstraint = (constraint, tableColumns) => { return { constraintName: constraint.constraint_name, - compositeUniqueKey: _.map(constraint.constraint_keys, getAttributeNameByPosition(tableColumns)), + compositeUniqueKey: _.map(constraint.constraint_keys, getColumnNameByPosition(tableColumns)), indexStorageParameters: _.join(constraint.storage_parameters, ','), indexTablespace: constraint.tablespace, indexComment: constraint.description, diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 6b9b45b..7cef60b 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -10,6 +10,10 @@ const { setDependencies: setDependenciesInCommonHelper, clearEmptyPropertiesInObject, } = require('./postgresHelpers/common'); +const { + setDependencies: setDependenciesInForeignKeysHelper, + prepareForeignKeys, +} = require('./postgresHelpers/foreignKeysHelper'); const { setDependencies: setDependenciesInTableHelper, prepareTablePartition, @@ -33,6 +37,7 @@ module.exports = { setDependenciesInCommonHelper(app); setDependenciesInTableHelper(app); setDependenciesInColumnHelper(app); + setDependenciesInForeignKeysHelper(app); }, async connect(connectionInfo, logger) { @@ -106,6 +111,7 @@ module.exports = { const inheritsResult = await db.queryTolerant(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid], true); const tableConstraintsResult = await db.queryTolerant(queryConstants.GET_TABLE_CONSTRAINTS, [tableOid]); const tableIndexesResult = await db.queryTolerant(queryConstants.GET_TABLE_INDEXES, [tableOid]); + const tableForeignKeys = await db.queryTolerant(queryConstants.GET_TABLE_FOREIGN_KEYS, [tableOid]); const partitioning = prepareTablePartition(partitionResult, tableColumns); const tableLevelProperties = prepareTableLevelData(tableLevelData); @@ -113,6 +119,7 @@ module.exports = { 
const inherits = inheritsResult?.parent_table_name; const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableColumns); const tableIndexes = prepareTableIndexes(tableIndexesResult); + const relationships = prepareForeignKeys(tableForeignKeys, tableName, schemaName, tableColumns); const tableData = { partitioning, @@ -140,6 +147,7 @@ module.exports = { entityLevel, jsonSchema: getJsonSchema(targetAttributes), documents, + relationships }; }, @@ -159,7 +167,8 @@ module.exports = { async _getDocuments(schemaName, tableName, recordSamplingSettings) { const fullTableName = `${schemaName}.${tableName}`; - const quantity = (await db.queryTolerant(queryConstants.GET_ROWS_COUNT(fullTableName), [], true))?.quantity || 0; + const quantity = + (await db.queryTolerant(queryConstants.GET_ROWS_COUNT(fullTableName), [], true))?.quantity || 0; const limit = getLimit(quantity, recordSamplingSettings); return await db.queryTolerant(queryConstants.GET_SAMPLED_DATA(fullTableName), [limit]); diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 6b16e7e..5bf3fe5 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -114,6 +114,21 @@ const queryConstants = { index_unique, reloptions, tablespace_name;`, + GET_TABLE_FOREIGN_KEYS: ` + SELECT pcon.conname AS relationship_name, + pcon.conkey AS table_columns_positions, + pc_foreign_table.relname AS foreign_table_name, + ARRAY( + SELECT column_name FROM unnest(pcon.confkey) AS column_position + JOIN information_schema.columns ON (ordinal_position = column_position) + WHERE table_name = pc_foreign_table.relname AND table_schema = foreign_table_namespace.nspname)::text[] AS foreign_columns, + foreign_table_namespace.nspname AS foreign_table_schema + FROM pg_constraint AS pcon + LEFT JOIN pg_class AS pc ON pcon.conindid = pc.oid + LEFT JOIN pg_tablespace AS pt ON pc.reltablespace = pt.oid + LEFT JOIN pg_class AS 
pc_foreign_table ON (pcon.confrelid = pc_foreign_table.oid) + JOIN pg_namespace AS foreign_table_namespace ON (pc_foreign_table.relnamespace = foreign_table_namespace.oid) + WHERE pcon.conrelid = $1 AND pcon.contype = 'f';`, }; const getQueryName = query => { From c8b08264712be6c6329abddd16cab850bf76cb45 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 1 Oct 2021 10:07:07 +0300 Subject: [PATCH 13/69] Added logging of Postgres version --- reverse_engineering/helpers/postgresService.js | 17 +++++++++++++---- reverse_engineering/helpers/queryConstants.js | 1 + 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 7cef60b..e34db39 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -40,12 +40,14 @@ module.exports = { setDependenciesInForeignKeysHelper(app); }, - async connect(connectionInfo, logger) { + async connect(connectionInfo, specificLogger) { const { pool, sshTunnel } = await createConnectionPool(connectionInfo); - db.initializePool(pool, logger); + db.initializePool(pool, specificLogger); currentSshTunnel = sshTunnel; - logger = logger; + logger = specificLogger; + + await this.logVersion(); }, async disconnect() { @@ -61,6 +63,13 @@ module.exports = { return db.query(queryConstants.PING); }, + async logVersion() { + const versionRow = await db.queryTolerant(queryConstants.GET_VERSION, [], true); + const version = versionRow?.version || 'Version not retrieved'; + + logger.info(`PostgreSQL version: ${version}`); + }, + async getAllSchemasNames() { const schemaNames = await db.query(queryConstants.GET_SCHEMA_NAMES); @@ -147,7 +156,7 @@ module.exports = { entityLevel, jsonSchema: getJsonSchema(targetAttributes), documents, - relationships + relationships, }; }, diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 
5bf3fe5..848eeba 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -1,5 +1,6 @@ const queryConstants = { PING: 'SELECT schema_name FROM information_schema.schemata LIMIT 1;', + GET_VERSION: 'SELECT version()', GET_SCHEMA_NAMES: 'SELECT schema_name FROM information_schema.schemata;', GET_TABLE_NAMES: ` SELECT table_name, table_type From ead4724a48c9d37b39fb6b6948d469a892c05e9b Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 1 Oct 2021 14:35:22 +0300 Subject: [PATCH 14/69] RE: added RE of views --- .../view_level/viewLevelConfig.json | 2 +- reverse_engineering/api.js | 17 ++++-- .../helpers/postgresHelpers/columnHelper.js | 2 +- .../helpers/postgresHelpers/viewHelper.js | 52 +++++++++++++++++++ .../helpers/postgresService.js | 44 +++++++++++++--- reverse_engineering/helpers/queryConstants.js | 6 +++ types/boolean.json | 1 + types/enum.json | 1 + types/json.json | 4 +- types/object.json | 1 + types/xml.json | 11 ++++ 11 files changed, 125 insertions(+), 16 deletions(-) create mode 100644 reverse_engineering/helpers/postgresHelpers/viewHelper.js create mode 100644 types/xml.json diff --git a/properties_pane/view_level/viewLevelConfig.json b/properties_pane/view_level/viewLevelConfig.json index 2b23581..2a31af4 100644 --- a/properties_pane/view_level/viewLevelConfig.json +++ b/properties_pane/view_level/viewLevelConfig.json @@ -150,7 +150,7 @@ making sure that you maintain a proper JSON format. 
}, { "propertyName": "As query", - "propertyKeyword": "query", + "propertyKeyword": "selectStatement", "propertyType": "details", "propertyTooltip": "A SELECT or VALUES command which will provide the columns and rows of the view.", "template": "textarea", diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 187d523..83c3ea7 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -103,11 +103,11 @@ module.exports = { })) ).then(tablesDataPerSchema => { const relationships = tablesDataPerSchema - .flatMap(({ entities }) => entities.map(entityData => entityData.relationships)) + .flatMap(({ entities }) => entities.tables.map(entityData => entityData.relationships)) .flat(); - const packages = tablesDataPerSchema.flatMap(({ schemaName, entities }) => - entities.map(entityData => ({ + const packages = tablesDataPerSchema.flatMap(({ schemaName, entities }) => { + const tablePackages = entities.tables.map(entityData => ({ dbName: schemaName, collectionName: entityData.name, documents: entityData.documents, @@ -117,9 +117,16 @@ module.exports = { validation: { jsonSchema: entityData.jsonSchema, }, - })) - ); + })); + const viewPackage = { + dbName: schemaName, + views: entities.views, + emptyBucket: false, + }; + + return [...tablePackages, viewPackage]; + }); return { packages, relationships }; }); diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index 01cf08f..a4cdb70 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -166,7 +166,7 @@ const setSubtypeFromSampledJsonValues = (columns, documents) => { return { ...column, - synonym: jsonType, + subtype: jsonType, }; }); }; diff --git a/reverse_engineering/helpers/postgresHelpers/viewHelper.js b/reverse_engineering/helpers/postgresHelpers/viewHelper.js new file mode 100644 index 0000000..11cc1f3 --- /dev/null 
+++ b/reverse_engineering/helpers/postgresHelpers/viewHelper.js @@ -0,0 +1,52 @@ +const { clearEmptyPropertiesInObject } = require('./common'); + +let _ = null; + +const setDependencies = app => { + _ = app.require('lodash'); +}; + +const VIEW_SUFFIX = ' (v)'; + +const isViewByTableType = table_type => table_type === 'VIEW'; +const isViewByName = name => _.endsWith(name, VIEW_SUFFIX); +const removeViewNameSuffix = name => name.slice(0, -VIEW_SUFFIX.length); +const setViewSuffix = name => `${name}${VIEW_SUFFIX}`; + +const generateCreateViewScript = (viewName, viewData) => { + return `CREATE VIEW ${viewName} AS ${viewData.view_definition}`; +}; + +const prepareViewData = (viewData, viewOptions) => { + const data = { + withCheckOption: Boolean(viewData.check_option), + checkTestingScope: getCheckTestingScope(viewData.check_option), + view_option: _.join(viewOptions.view_options, ','), + temporary: viewOptions.persistence === 't', + recursive: isViewRecursive(viewData), + }; + + return clearEmptyPropertiesInObject(data); +}; + +const getCheckTestingScope = check_option => { + if (check_option === 'NONE') { + return ''; + } + + return check_option; +}; + +const isViewRecursive = viewData => { + return _.startsWith(_.trim(viewData.view_definition), 'WITH RECURSIVE'); +}; + +module.exports = { + setDependencies, + isViewByTableType, + isViewByName, + removeViewNameSuffix, + generateCreateViewScript, + setViewSuffix, + prepareViewData, +}; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index e34db39..e5e00b2 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -23,14 +23,21 @@ const { prepareTableLevelData, prepareTableIndexes, } = require('./postgresHelpers/tableHelper'); +const { + setDependencies: setViewDependenciesInViewHelper, + isViewByTableType, + isViewByName, + removeViewNameSuffix, + generateCreateViewScript, + setViewSuffix, + 
prepareViewData, +} = require('./postgresHelpers/viewHelper'); const queryConstants = require('./queryConstants'); let currentSshTunnel = null; let _ = null; let logger = null; -const VIEW_SUFFIX = ' (v)'; - module.exports = { setDependencies(app) { _ = app.require('lodash'); @@ -38,6 +45,7 @@ module.exports = { setDependenciesInTableHelper(app); setDependenciesInColumnHelper(app); setDependenciesInForeignKeysHelper(app); + setViewDependenciesInViewHelper(app); }, async connect(connectionInfo, specificLogger) { @@ -85,7 +93,7 @@ module.exports = { .filter(({ table_type }) => !_.includes(tableTypesToExclude, table_type)) .map(({ table_name, table_type }) => { if (isViewByTableType(table_type)) { - return `${table_name}${VIEW_SUFFIX}`; + return setViewSuffix(table_name); } else { return table_name; } @@ -98,12 +106,18 @@ module.exports = { const [viewsNames, tablesNames] = _.partition(entitiesNames, isViewByName); - return Promise.all( + const tables = await Promise.all( _.map( tablesNames, _.bind(this._retrieveSingleTableData, this, recordSamplingSettings, schemaOid, schemaName) ) ); + + const views = await Promise.all( + _.map(viewsNames, _.bind(this._retrieveSingleViewData, this, schemaOid, schemaName)) + ); + + return { views, tables }; }, async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, tableName) { @@ -182,10 +196,26 @@ module.exports = { return await db.queryTolerant(queryConstants.GET_SAMPLED_DATA(fullTableName), [limit]); }, -}; -const isViewByTableType = table_type => table_type === 'VIEW'; -const isViewByName = name => _.endsWith(name, VIEW_SUFFIX); + async _retrieveSingleViewData(schemaOid, schemaName, viewName) { + viewName = removeViewNameSuffix(viewName); + + const viewData = await db.query(queryConstants.GET_VIEW_DATA, [viewName, schemaName], true); + const viewOptions = await db.queryTolerant(queryConstants.GET_VIEW_OPTIONS, [viewName, schemaOid], true); + + const script = generateCreateViewScript(viewName, viewData); + 
const data = prepareViewData(viewData, viewOptions); + + return { + name: viewName, + data, + ddl: { + script, + type: 'postgres', + }, + }; + }, +}; const isSystemSchema = schema_name => { if (_.startsWith(schema_name, 'pg_')) { diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 848eeba..c3f4852 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -130,6 +130,12 @@ const queryConstants = { LEFT JOIN pg_class AS pc_foreign_table ON (pcon.confrelid = pc_foreign_table.oid) JOIN pg_namespace AS foreign_table_namespace ON (pc_foreign_table.relnamespace = foreign_table_namespace.oid) WHERE pcon.conrelid = $1 AND pcon.contype = 'f';`, + GET_VIEW_DATA: `SELECT * FROM information_schema.views WHERE table_name = $1 AND table_schema = $2;`, + GET_VIEW_OPTIONS: ` + SELECT reloptions AS view_options, + relpersistence AS persistence + FROM pg_class + WHERE relname = $1 AND relnamespace = $2;`, }; const getQueryName = query => { diff --git a/types/boolean.json b/types/boolean.json index d3b03e6..1846399 100644 --- a/types/boolean.json +++ b/types/boolean.json @@ -5,6 +5,7 @@ "parentType": "boolean", "sample": true, "useSample": true, + "hiddenOnEntity": "view", "defaultValues": { "primaryKey": false, "default": "", diff --git a/types/enum.json b/types/enum.json index 2d3c012..81b51be 100644 --- a/types/enum.json +++ b/types/enum.json @@ -3,6 +3,7 @@ "erdAbbreviation": "", "dtdAbbreviation": "{enum}", "parentType": "string", + "hiddenOnEntity": "view", "defaultValues": { "primaryKey": false, "relationshipType": "", diff --git a/types/json.json b/types/json.json index de993e4..fcb70ec 100644 --- a/types/json.json +++ b/types/json.json @@ -3,7 +3,7 @@ "erdAbbreviation": "", "dtdAbbreviation": "{...}", "parentType": "string", - "default": true, + "hiddenOnEntity": "view", "defaultValues": { "primaryKey": false, "relationshipType": "", @@ -14,7 +14,7 @@ 
"enabled": true, "additionalProperties": false, "mode": "json", - "subtype": "object" + "subtype": "string" }, "subtypes": { "object": { diff --git a/types/object.json b/types/object.json index 4693ba1..2b43330 100644 --- a/types/object.json +++ b/types/object.json @@ -2,6 +2,7 @@ "name": "object", "parentType": "document", "structureType": true, + "hiddenOnEntity": "view", "defaultValues": { "subtype": "object", "properties": [] diff --git a/types/xml.json b/types/xml.json new file mode 100644 index 0000000..d3ec758 --- /dev/null +++ b/types/xml.json @@ -0,0 +1,11 @@ +{ + "name": "xml", + "erdAbbreviation": "", + "dtdAbbreviation": "{xml}", + "parentType": "binary", + "useSample": false, + "hiddenOnEntity": "view", + "defaultValues": { + "primaryKey": false + } +} \ No newline at end of file From 987947dc76f063453e2d8344ea51819f19df8bf0 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 1 Oct 2021 17:10:02 +0300 Subject: [PATCH 15/69] RE: added RE of functions and procedures --- .../container_level/containerLevelConfig.json | 653 +++++++++--------- reverse_engineering/api.js | 23 +- .../helpers/postgresHelpers/columnHelper.js | 1 + .../helpers/postgresHelpers/functionHelper.js | 84 +++ .../helpers/postgresService.js | 54 +- reverse_engineering/helpers/queryConstants.js | 98 ++- 6 files changed, 539 insertions(+), 374 deletions(-) create mode 100644 reverse_engineering/helpers/postgresHelpers/functionHelper.js diff --git a/properties_pane/container_level/containerLevelConfig.json b/properties_pane/container_level/containerLevelConfig.json index a7b39fe..d644aea 100644 --- a/properties_pane/container_level/containerLevelConfig.json +++ b/properties_pane/container_level/containerLevelConfig.json @@ -103,7 +103,6 @@ making sure that you maintain a proper JSON format. } */ - [ { "lowerTab": "Details", @@ -136,334 +135,340 @@ making sure that you maintain a proper JSON format. 
}, { "lowerTab": "Functions", - "structure": [{ - "propertyName": "Functions", - "propertyType": "group", - "propertyKeyword": "UDFs", - "propertyTooltip": "Creates a new scalar user-defined function (UDF) using either a SQL SELECT clause or a Python program.", - "structure": [ - { - "propertyName": "Name", - "propertyKeyword": "funcName", - "propertyTooltip": "The name of the function.", - "propertyType": "text" - }, - { - "propertyName": "Comments", - "propertyKeyword": "funcDescription", - "propertyTooltip": "description", - "propertyType": "details", - "template": "textarea" - }, - { - "propertyName": "Or replace", - "propertyKeyword": "funcOrReplace", - "propertyType": "checkbox", - "propertyTooltip": "Specifies that if a function with the same name and input argument data types, or signature, as this one already exists, the existing function is replaced. You can only replace a function with a new function that defines an identical set of data types." - }, - { - "propertyName": "Arguments", - "propertyKeyword": "procArgs", - "propertyType": "group", - "propertyTooltip": "", - "structure": [ - { - "propertyName": "Arg mode", - "propertyKeyword": "argmode", - "propertyTooltip": "language", - "propertyType": "The mode of an argument: IN, INOUT, or VARIADIC.", - "propertyType": "select", - "defaultValue": "IN", - "options": [ - "IN", - "INOUT", - "VARIADIC" - ] - }, - { - "propertyName": "Argument name", - "propertyKeyword": "argname", - "propertyTooltip": "The name of the argument.", - "propertyType": "text" - }, - { - "propertyName": "Arg type default", - "propertyKeyword": "artypedefault", - "propertyType": "checkbox", - "defaultValue": true, - "propertyTooltip": "Uncheck if you need to specify another type" - }, - { - "propertyName": "Arg type expression", - "propertyKeyword": "default_expr", - "propertyTooltip": "The argument types can be base, composite, or domain types, or can reference the type of a table column.", - "propertyType": "text", - "dependency": 
{ - "key": "artypedefault", - "value": false + "structure": [ + { + "propertyName": "Functions", + "propertyType": "group", + "propertyKeyword": "UDFs", + "propertyTooltip": "Creates a new scalar user-defined function (UDF) using either a SQL SELECT clause or a Python program.", + "structure": [ + { + "propertyName": "Name", + "propertyKeyword": "name", + "propertyTooltip": "The name of the function.", + "propertyType": "text" + }, + { + "propertyName": "Comments", + "propertyKeyword": "functionDescription", + "propertyTooltip": "description", + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Or replace", + "propertyKeyword": "functionOrReplace", + "propertyType": "checkbox", + "propertyTooltip": "Specifies that if a function with the same name and input argument data types, or signature, as this one already exists, the existing function is replaced. You can only replace a function with a new function that defines an identical set of data types." + }, + { + "propertyName": "Arguments", + "propertyKeyword": "functionArguments", + "propertyType": "group", + "propertyTooltip": "", + "structure": [ + { + "propertyName": "Arg mode", + "propertyKeyword": "argumentMode", + "propertyTooltip": "The mode of an argument: IN, INOUT, or VARIADIC.", + "propertyType": "select", + "defaultValue": "IN", + "options": [ + "IN", + "INOUT", + "VARIADIC" + ] + }, + { + "propertyName": "Argument name", + "propertyKeyword": "argumentName", + "propertyTooltip": "The name of the argument.", + "propertyType": "text" + }, + { + "propertyName": "Argument type", + "propertyKeyword": "argumentType", + "propertyTooltip": "The type of argument.", + "propertyType": "text" + }, + { + "propertyName": "Arg type default", + "propertyKeyword": "argumentTypeDefault", + "propertyType": "checkbox", + "defaultValue": true, + "propertyTooltip": "Uncheck if you need to specify another type" + }, + { + "propertyName": "Arg type expression", + "propertyKeyword": 
"defaultExpression", + "propertyTooltip": "The argument types can be base, composite, or domain types, or can reference the type of a table column.", + "propertyType": "text", + "dependency": { + "key": "argumentTypeDefault", + "value": false + } } + ] + }, + { + "propertyName": "Returns set of", + "propertyKeyword": "functionReturnsSetOf", + "propertyType": "checkbox", + "propertyTooltip": "Indicates that the function will return a set of items, rather than a single item." + }, + { + "propertyName": "Returns data type", + "propertyKeyword": "functionReturnType", + "propertyTooltip": "Any valid PostgreSQL data type", + "propertyType": "text", + "dependency": { + "key": "setOf", + "value": false } - ] - }, - { - "propertyName": "Returns set of", - "propertyKeyword": "setOf", - "propertyType": "checkbox", - "propertyTooltip": "Indicates that the function will return a set of items, rather than a single item." - }, - { - "propertyName": "Returns data type", - "propertyKeyword": "returnType", - "propertyTooltip": "Any valid PostgreSQL data type", - "propertyType": "text", - "dependency": { - "key": "setOf", - "value": false - } - }, - { - "propertyName": "Language", - "propertyKeyword": "lang_name", - "propertyTooltip": "The name of the language that the procedure is implemented in.", - "propertyType": "select", - "defaultValue": "sql", - "options": [ - "sql", - "plpgsql", - "c", - "internal" - ] - }, - { - "propertyName": "Definition", - "propertyKeyword": "definition", - "propertyTooltip": "A string constant defining the function; the meaning depends on the language. It can be an internal function name, the path to an object file, an SQL command, or text in a procedural language.", - "propertyType": "details", - "template": "textarea", - "markdown": false - }, - { - "propertyName": "Window", - "propertyKeyword": "window", - "propertyType": "checkbox", - "propertyTooltip": "Indicates that the function is a window function rather than a plain function. 
This is currently only useful for functions written in C.", - "dependency": { - "key": "lang_name", - "value": "c" - } - }, - { - "propertyName": "Volatility", - "propertyKeyword": "volatility", - "propertyType": "select", - "propertyTooltip": "These attributes inform the query optimizer about the behavior of the function.", - "defaultValue": "VOLATILE", - "options": [ - "IMMUTABLE", - "STABLE", - "VOLATILE" - ] - }, - { - "propertyName": "Leak proof", - "propertyKeyword": "leakproof", - "propertyType": "checkbox", - "propertyTooltip": "Indicates that the function has no side effects. It reveals no information about its arguments other than by its return value." - }, - { - "propertyName": "When NULL args", - "propertyKeyword": "volatility", - "propertyType": "select", - "propertyTooltip": "Indicates whether the function will be called normally when some of its arguments are null, or the function always returns null ", - "defaultValue": "CALLED ON NULL INPUT", - "options": [ - "CALLED ON NULL INPUT", - "RETURNS NULL ON NULL INPUT", - "STRICT" - ] - }, - { - "propertyName": "SQL Security", - "propertyKeyword": "functionSqlSecurity", - "propertyTooltip": "Clause specifying what privileges are used when a procedure is called. If SQL SECURITY is INVOKER, the procedure body will be evaluated using the privileges of the user calling the procedure. If SQL SECURITY is DEFINER, the procedure body is always evaluated using the privileges of the definer account. 
DEFINER is the default.", - "propertyType": "select", - "defaultValue": "DEFINER", - "options": [ - "", - "DEFINER", - "INVOKER" - ] - }, - { - "propertyName": "Parallel", - "propertyKeyword": "parallel", - "propertyTooltip": "Indicates whether the function can be executed in parallel mode and whether the presence of such a function in an SQL statement forces a serial execution plan.", - "propertyType": "select", - "defaultValue": "UNSAFE", - "options": [ - "", - "UNSAFE", - "RESTICTED", - "SAFE" - ] - }, - { - "propertyName": "Estimated cost", - "propertyKeyword": "execution_cost", - "propertyType": "numeric", - "valueType": "number", - "propertyTooltip": "A positive number giving the estimated execution cost for the function, in units of cpu_operator_cost." - }, - { - "propertyName": "Estimated rows", - "propertyKeyword": "execution_cost", - "propertyType": "numeric", - "valueType": "number", - "minValue": 0, - "defaultValue": 1000, - "step": 1, - "propertyTooltip": "A positive number giving the estimated number of rows that the planner should expect the function to return.", - "dependency": { - "key": "setOf", - "value": true + }, + { + "propertyName": "Language", + "propertyKeyword": "functionLanguage", + "propertyTooltip": "The name of the language that the procedure is implemented in.", + "propertyType": "select", + "defaultValue": "sql", + "options": [ + "sql", + "plpgsql", + "c", + "internal" + ] + }, + { + "propertyName": "Definition", + "propertyKeyword": "functionDefinition", + "propertyTooltip": "A string constant defining the function; the meaning depends on the language. 
It can be an internal function name, the path to an object file, an SQL command, or text in a procedural language.", + "propertyType": "details", + "template": "textarea", + "markdown": false + }, + { + "propertyName": "Window", + "propertyKeyword": "functionWindow", + "propertyType": "checkbox", + "propertyTooltip": "Indicates that the function is a window function rather than a plain function. This is currently only useful for functions written in C.", + "dependency": { + "key": "lang_name", + "value": "c" + } + }, + { + "propertyName": "Volatility", + "propertyKeyword": "functionVolatility", + "propertyType": "select", + "propertyTooltip": "These attributes inform the query optimizer about the behavior of the function.", + "defaultValue": "VOLATILE", + "options": [ + "IMMUTABLE", + "STABLE", + "VOLATILE" + ] + }, + { + "propertyName": "Leak proof", + "propertyKeyword": "functionLeakProof", + "propertyType": "checkbox", + "propertyTooltip": "Indicates that the function has no side effects. It reveals no information about its arguments other than by its return value." + }, + { + "propertyName": "When NULL args", + "propertyKeyword": "functionNullArgs", + "propertyType": "select", + "propertyTooltip": "Indicates whether the function will be called normally when some of its arguments are null, or the function always returns null ", + "defaultValue": "CALLED ON NULL INPUT", + "options": [ + "CALLED ON NULL INPUT", + "RETURNS NULL ON NULL INPUT", + "STRICT" + ] + }, + { + "propertyName": "SQL Security", + "propertyKeyword": "functionSqlSecurity", + "propertyTooltip": "Clause specifying what privileges are used when a procedure is called. If SQL SECURITY is INVOKER, the procedure body will be evaluated using the privileges of the user calling the procedure. If SQL SECURITY is DEFINER, the procedure body is always evaluated using the privileges of the definer account. 
DEFINER is the default.", + "propertyType": "select", + "defaultValue": "DEFINER", + "options": [ + "", + "DEFINER", + "INVOKER" + ] + }, + { + "propertyName": "Parallel", + "propertyKeyword": "functionParallel", + "propertyTooltip": "Indicates whether the function can be executed in parallel mode and whether the presence of such a function in an SQL statement forces a serial execution plan.", + "propertyType": "select", + "defaultValue": "UNSAFE", + "options": [ + "", + "UNSAFE", + "RESTICTED", + "SAFE" + ] + }, + { + "propertyName": "Estimated cost", + "propertyKeyword": "functionExecutionCost", + "propertyType": "numeric", + "valueType": "number", + "propertyTooltip": "A positive number giving the estimated execution cost for the function, in units of cpu_operator_cost." + }, + { + "propertyName": "Estimated rows", + "propertyKeyword": "functionExecutionRows", + "propertyType": "numeric", + "valueType": "number", + "minValue": 0, + "defaultValue": 1000, + "step": 1, + "propertyTooltip": "A positive number giving the estimated number of rows that the planner should expect the function to return.", + "dependency": { + "key": "setOf", + "value": true + } + }, + { + "propertyName": "Support function", + "propertyKeyword": "functionSupportFunction", + "propertyTooltip": "The name of a planner support function to use for this function.", + "propertyType": "details", + "template": "textarea", + "markdown": false + }, + { + "propertyName": "Config parameters", + "propertyKeyword": "functionConfigurationParameters", + "propertyTooltip": "The SET clause causes the specified configuration parameter to be set to the specified value when the function is entered, and then restored to its prior value when the function exits.", + "propertyType": "details", + "template": "textarea", + "markdown": false + }, + { + "propertyName": "Remarks", + "propertyKeyword": "functionComments", + "propertyTooltip": "comments", + "addTimestampButton": false, + "propertyType": "details", + 
"template": "textarea" } - }, - { - "propertyName": "Support function", - "propertyKeyword": "support_function", - "propertyTooltip": "The name of a planner support function to use for this function.", - "propertyType": "details", - "template": "textarea", - "markdown": false - }, - { - "propertyName": "Config parameters", - "propertyKeyword": "configuration_parameter", - "propertyTooltip": "The SET clause causes the specified configuration parameter to be set to the specified value when the function is entered, and then restored to its prior value when the function exits.", - "propertyType": "details", - "template": "textarea", - "markdown": false - }, - { - "propertyName": "Remarks", - "propertyKeyword": "functionComments", - "propertyTooltip": "comments", - "addTimestampButton": false, - "propertyType": "details", - "template": "textarea" - } - ] - }] + ] + } + ] }, { "lowerTab": "Procedures", - "structure": [{ - "propertyName": "Procedures", - "propertyType": "group", - "propertyKeyword": "Procedures", - "propertyTooltip": "Creates a new stored procedure or replaces an existing procedure for the current database.", - "structure": [ - { - "propertyName": "Name", - "propertyKeyword": "name", - "propertyTooltip": "The name of the procedure.", - "propertyType": "text" - }, - { - "propertyName": "Comments", - "propertyKeyword": "functionDescription", - "propertyTooltip": "description", - "propertyType": "details", - "template": "textarea" - }, - { - "propertyName": "Or replace", - "propertyKeyword": "orReplace", - "propertyType": "checkbox", - "propertyTooltip": "Specifies that if a procedure with the same name and input argument data types, or signature, as this one already exists, the existing function is replaced. You can only replace a function with a new function that defines an identical set of data types." 
- }, - { - "propertyName": "Language", - "propertyKeyword": "lang_name", - "propertyTooltip": "The name of the language that the procedure is implemented in.", - "propertyType": "select", - "defaultValue": "sql", - "options": [ - "sql", - "plpgsql", - "c", - "internal" - ] - }, - { - "propertyName": "Arguments", - "propertyKeyword": "procArgs", - "propertyType": "group", - "propertyTooltip": "", - "structure": [ - { - "propertyName": "Arg mode", - "propertyKeyword": "argmode", - "propertyTooltip": "language", - "propertyType": "The mode of an argument: IN, INOUT, or VARIADIC.", - "propertyType": "select", - "defaultValue": "IN", - "options": [ - "IN", - "INOUT", - "VARIADIC" - ] - }, - { - "propertyName": "Argument name", - "propertyKeyword": "argname", - "propertyTooltip": "The name of the argument.", - "propertyType": "text" - }, - { - "propertyName": "Arg type default", - "propertyKeyword": "artypedefault", - "propertyType": "checkbox", - "defaultValue": true, - "propertyTooltip": "Uncheck if you need to specify another type" - }, - { - "propertyName": "Arg type expression", - "propertyKeyword": "default_expr", - "propertyTooltip": "The argument types can be base, composite, or domain types, or can reference the type of a table column.", - "propertyType": "text", - "dependency": { - "key": "artypedefault", - "value": false + "structure": [ + { + "propertyName": "Procedures", + "propertyType": "group", + "propertyKeyword": "Procedures", + "propertyTooltip": "Creates a new stored procedure or replaces an existing procedure for the current database.", + "structure": [ + { + "propertyName": "Name", + "propertyKeyword": "name", + "propertyTooltip": "The name of the procedure.", + "propertyType": "text" + }, + { + "propertyName": "Comments", + "propertyKeyword": "description", + "propertyTooltip": "description", + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Or replace", + "propertyKeyword": "orReplace", + "propertyType": "checkbox", 
+ "propertyTooltip": "Specifies that if a procedure with the same name and input argument data types, or signature, as this one already exists, the existing function is replaced. You can only replace a function with a new function that defines an identical set of data types." + }, + { + "propertyName": "Language", + "propertyKeyword": "language", + "propertyTooltip": "The name of the language that the procedure is implemented in.", + "propertyType": "select", + "defaultValue": "sql", + "options": [ + "sql", + "plpgsql", + "c", + "internal" + ] + }, + { + "propertyName": "Arguments", + "propertyKeyword": "inputArgs", + "propertyType": "group", + "propertyTooltip": "", + "structure": [ + { + "propertyName": "Arg mode", + "propertyKeyword": "argumentMode", + "propertyTooltip": "The mode of an argument: IN, INOUT, or VARIADIC.", + "propertyType": "select", + "defaultValue": "IN", + "options": [ + "IN", + "INOUT", + "VARIADIC" + ] + }, + { + "propertyName": "Argument name", + "propertyKeyword": "argumentName", + "propertyTooltip": "The name of the argument.", + "propertyType": "text" + }, + { + "propertyName": "Argument type", + "propertyKeyword": "argumentType", + "propertyTooltip": "The type of argument.", + "propertyType": "text" + }, + { + "propertyName": "Arg type default", + "propertyKeyword": "argumentTypeDefault", + "propertyType": "checkbox", + "defaultValue": true, + "propertyTooltip": "Uncheck if you need to specify another type" + }, + { + "propertyName": "Arg type expression", + "propertyKeyword": "defaultExpression", + "propertyTooltip": "The argument types can be base, composite, or domain types, or can reference the type of a table column.", + "propertyType": "text", + "dependency": { + "key": "argumentTypeDefault", + "value": false + } } - } - ] - }, - { - "propertyName": "Procedure body", - "propertyKeyword": "procBody", - "propertyTooltip": "Valid procedure statement.", - "propertyType": "details", - "template": "textarea", - "markdown": false - }, - 
{ - "propertyName": "Parameters", - "propertyKeyword": "inputArgs", - "propertyTooltip": "A list of parameter names, modes, and data types. ", - "propertyType": "details", - "template": "textarea", - "markdown": false - }, - { - "propertyName": "Remarks", - "propertyKeyword": "functionComments", - "propertyTooltip": "comments", - "addTimestampButton": false, - "propertyType": "details", - "template": "textarea" - } - ] - }] + ] + }, + { + "propertyName": "Procedure body", + "propertyKeyword": "body", + "propertyTooltip": "Valid procedure statement.", + "propertyType": "details", + "template": "textarea", + "markdown": false + }, + { + "propertyName": "Remarks", + "propertyKeyword": "comments", + "propertyTooltip": "comments", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" + } + ] + } + ] } -] +] \ No newline at end of file diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 83c3ea7..439a646 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -95,19 +95,25 @@ module.exports = { const { packages, relationships } = await Promise.all( schemasNames.map(async schemaName => ({ schemaName, - entities: await postgresService.retrieveEntitiesData( + ...(await postgresService.retrieveEntitiesData( schemaName, collections[schemaName], data.recordSamplingSettings - ), + )), + ...(await postgresService.retrieveFunctionsWithProcedures(schemaName)), })) - ).then(tablesDataPerSchema => { - const relationships = tablesDataPerSchema - .flatMap(({ entities }) => entities.tables.map(entityData => entityData.relationships)) + ).then(schemaData => { + const relationships = schemaData + .flatMap(({ tables }) => tables.map(entityData => entityData.relationships)) .flat(); - const packages = tablesDataPerSchema.flatMap(({ schemaName, entities }) => { - const tablePackages = entities.tables.map(entityData => ({ + const packages = schemaData.flatMap(({ schemaName, tables, views, functions, procedures }) => { + 
const bucketInfo = { + UDFs: functions, + Procedures: procedures, + }; + + const tablePackages = tables.map(entityData => ({ dbName: schemaName, collectionName: entityData.name, documents: entityData.documents, @@ -117,11 +123,12 @@ module.exports = { validation: { jsonSchema: entityData.jsonSchema, }, + bucketInfo, })); const viewPackage = { dbName: schemaName, - views: entities.views, + views: views, emptyBucket: false, }; diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index a4cdb70..af8e300 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -23,6 +23,7 @@ const columnPropertiesMapper = { number_of_array_dimensions: 'numberOfArrayDimensions', udt_name: 'udt_name', character_maximum_length: 'length', + description: 'description', }; const mapColumnData = column => { diff --git a/reverse_engineering/helpers/postgresHelpers/functionHelper.js b/reverse_engineering/helpers/postgresHelpers/functionHelper.js new file mode 100644 index 0000000..f71dab5 --- /dev/null +++ b/reverse_engineering/helpers/postgresHelpers/functionHelper.js @@ -0,0 +1,84 @@ +let _ = null; + +const setDependencies = app => { + _ = app.require('lodash'); +}; + +const mapFunctionArgs = args => { + return _.map(args, arg => ({ + argumentMode: arg.parameter_mode, + argumentName: arg.parameter_name, + argumentType: arg.data_type, + argumentTypeDefault: !Boolean(arg.parameter_default), + defaultExpression: arg.parameter_default, + })); +}; + +const getVolatility = volatility => { + switch (volatility) { + case 'i': + return 'IMMUTABLE'; + case 's': + return 'STABLE'; + case 'v': + default: + return 'VOLATILE'; + } +}; + +const getParallel = parallel => { + switch (parallel) { + case 's': + return 'SAFE'; + case 'r': + return 'RESTICTED'; + case 'u': + return 'UNSAFE'; + default: + return ''; + } +}; + +const getNullArgs = 
strict => { + if (strict) { + return 'STRICT'; + } + + return 'CALLED ON NULL INPUT'; +}; + +const mapFunctionData = (functionData, functionArgs, additionalData) => { + return { + name: functionData.name, + functionDescription: additionalData?.description, + functionArguments: mapFunctionArgs(functionArgs), + functionReturnsSetOf: additionalData?.returnsSet, + functionReturnType: functionData.return_data_type, + functionLanguage: _.toLower(functionData.external_language), + functionDefinition: functionData.routine_definition, + functionWindow: additionalData.kind === 'w', + functionVolatility: getVolatility(additionalData?.volatility), + functionLeakProof: additionalData?.leak_proof, + functionNullArgs: getNullArgs(additionalData?.strict), + functionSqlSecurity: functionData.security_type, + functionParallel: getParallel(functionData.parallel), + functionExecutionCost: functionData.estimated_cost, + functionExecutionRows: functionData.estimated_rows, + }; +}; + +const mapProcedureData = (functionData, functionArgs, additionalData) => { + return { + name: functionData.name, + description: additionalData?.description, + language: _.toLower(functionData.external_language), + inputArgs: mapFunctionArgs(functionArgs), + body: functionData.routine_definition, + }; +}; + +module.exports = { + setDependencies, + mapFunctionData, + mapProcedureData, +}; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index e5e00b2..a33b8d2 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -14,6 +14,11 @@ const { setDependencies: setDependenciesInForeignKeysHelper, prepareForeignKeys, } = require('./postgresHelpers/foreignKeysHelper'); +const { + setDependencies: setFunctionHelperDependencies, + mapFunctionData, + mapProcedureData, +} = require('./postgresHelpers/functionHelper'); const { setDependencies: setDependenciesInTableHelper, prepareTablePartition, @@ -46,6 
+51,7 @@ module.exports = { setDependenciesInColumnHelper(app); setDependenciesInForeignKeysHelper(app); setViewDependenciesInViewHelper(app); + setFunctionHelperDependencies(app); }, async connect(connectionInfo, specificLogger) { @@ -102,24 +108,52 @@ module.exports = { async retrieveEntitiesData(schemaName, entitiesNames, recordSamplingSettings) { const schemaOidResult = await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true); - const schemaOid = schemaOidResult.oid; + const schemaOid = schemaOidResult?.oid; const [viewsNames, tablesNames] = _.partition(entitiesNames, isViewByName); - const tables = await Promise.all( - _.map( - tablesNames, - _.bind(this._retrieveSingleTableData, this, recordSamplingSettings, schemaOid, schemaName) - ) + const tables = await mapPromises( + tablesNames, + _.bind(this._retrieveSingleTableData, this, recordSamplingSettings, schemaOid, schemaName) ); - const views = await Promise.all( - _.map(viewsNames, _.bind(this._retrieveSingleViewData, this, schemaOid, schemaName)) - ); + const views = await mapPromises(viewsNames, _.bind(this._retrieveSingleViewData, this, schemaOid, schemaName)); return { views, tables }; }, + async retrieveFunctionsWithProcedures(schemaName) { + const schemaOid = (await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true))?.oid; + + const functionsWithProcedures = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES, [ + schemaName, + ]); + const functionAdditionalData = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL, [ + schemaOid, + ]); + const [functions, procedures] = _.partition(functionsWithProcedures, { routine_type: 'FUNCTION' }); + + const userDefinedFunctions = await mapPromises(functions, async functionData => { + const functionArgs = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ARGS, [ + functionData.specific_name, + ]); + const additionalData = _.find(functionAdditionalData, { 
function_name: functionData.name }); + + return mapFunctionData(functionData, functionArgs, additionalData); + }); + + const userDefinedProcedures = await mapPromises(procedures, async functionData => { + const functionArgs = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ARGS, [ + functionData.specific_name, + ]); + const additionalData = _.find(functionAdditionalData, { function_name: functionData.name }); + + return mapProcedureData(functionData, functionArgs, additionalData); + }); + + return { functions: userDefinedFunctions, procedures: userDefinedProcedures }; + }, + async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, tableName) { const tableLevelData = await db.queryTolerant( queryConstants.GET_TABLE_LEVEL_DATA, @@ -230,3 +264,5 @@ const isSystemSchema = schema_name => { }; const getDescriptionFromResult = result => result?.obj_description; + +const mapPromises = (items, asyncFunc) => Promise.all(_.map(items, asyncFunc)); diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index c3f4852..ce8f7f8 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -7,33 +7,35 @@ const queryConstants = { FROM information_schema.tables WHERE table_schema = $1 ORDER BY table_name;`, - GET_NAMESPACE_OID: 'SELECT oid FROM pg_namespace WHERE nspname = $1', + GET_NAMESPACE_OID: 'SELECT oid FROM pg_catalog.pg_namespace WHERE nspname = $1', GET_TABLE_LEVEL_DATA: ` SELECT pc.oid, pc.relpersistence, pc.reloptions, pt.spcname - FROM pg_class AS pc - LEFT JOIN pg_tablespace AS pt + FROM pg_catalog.pg_class AS pc + LEFT JOIN pg_catalog.pg_tablespace AS pt ON pc.reltablespace = pt.oid WHERE pc.relname = $1 AND pc.relnamespace = $2;`, GET_TABLE_PARTITION_DATA: ` SELECT partstrat as partition_method, partattrs::int2[] as partition_attributes_positions, - pg_get_expr(partexprs, partrelid) AS expressions - FROM pg_partitioned_table 
+ pg_catalog.pg_get_expr(partexprs, partrelid) AS expressions + FROM pg_catalog.pg_partitioned_table WHERE partrelid = $1;`, GET_TABLE_COLUMNS: ` SELECT * FROM information_schema.columns WHERE table_name = $1 AND table_schema = $2 ORDER BY ordinal_position`, GET_TABLE_COLUMNS_ADDITIONAL_DATA: ` - SELECT attname AS name, attndims AS number_of_array_dimensions - FROM pg_attribute + SELECT attname AS name, + attndims AS number_of_array_dimensions, + obj_description(attrelid, 'pg_class') AS description + FROM pg_catalog.pg_attribute WHERE attrelid = $1;`, GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`, GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) AS quantity FROM ${fullTableName};`, GET_SAMPLED_DATA: fullTableName => `SELECT * FROM ${fullTableName} LIMIT $1;`, GET_INHERITS_PARENT_TABLE_NAME: ` - SELECT pc.relname AS parent_table_name FROM pg_inherits AS pi - INNER JOIN pg_class AS pc + SELECT pc.relname AS parent_table_name FROM pg_catalog.pg_inherits AS pi + INNER JOIN pg_catalog.pg_class AS pc ON pc.oid = pi.inhparent WHERE pi.inhrelid = $1;`, GET_TABLE_CONSTRAINTS: ` @@ -41,14 +43,14 @@ const queryConstants = { pcon.contype AS constraint_type, pcon.connoinherit AS no_inherit, pcon.conkey AS constraint_keys, - pg_get_expr(pcon.conbin, pcon.conrelid) AS expression, - obj_description(pcon.oid) AS description, + pg_catalog.pg_get_expr(pcon.conbin, pcon.conrelid) AS expression, + obj_description(pcon.oid, 'pg_constraint') AS description, pc.reloptions AS storage_parameters, pt.spcname AS tablespace - FROM pg_constraint AS pcon - LEFT JOIN pg_class AS pc + FROM pg_catalog.pg_constraint AS pcon + LEFT JOIN pg_catalog.pg_class AS pc ON pcon.conindid = pc.oid - LEFT JOIN pg_tablespace AS pt + LEFT JOIN pg_catalog.pg_tablespace AS pt ON pc.reltablespace = pt.oid WHERE pcon.conrelid = $1;`, GET_TABLE_INDEXES: ` @@ -85,12 +87,12 @@ const queryConstants = { WHEN opclass_t.opcname is not null THEN format('%I.%I',opclas_namespace.nspname,opclass_t.opcname) END AS 
opclass, CASE - WHEN indexes.ord > 0 THEN pg_index_column_has_property(indexes.indexrelid, indexes.key, 'asc') + WHEN indexes.ord > 0 THEN pg_catalog.pg_index_column_has_property(indexes.indexrelid, indexes.key, 'asc') END AS ascending, CASE - WHEN indexes.ord > 0 THEN pg_index_column_has_property(indexes.indexrelid, indexes.key, 'nulls_first') + WHEN indexes.ord > 0 THEN pg_catalog.pg_index_column_has_property(indexes.indexrelid, indexes.key, 'nulls_first') END AS nulls_first, - pg_get_indexdef(indexes.indexrelid, ord, false) AS expression + pg_catalog.pg_get_indexdef(indexes.indexrelid, ord, false) AS expression FROM (SELECT *, generate_series(1,array_length(i.indkey,1)) AS ord, @@ -98,17 +100,17 @@ const queryConstants = { unnest(i.indcollation) AS coll, unnest(i.indclass) AS class, unnest(i.indoption) AS option - FROM pg_index i) indexes - JOIN pg_class c ON (c.oid=indexes.indexrelid) - JOIN pg_class ct ON (ct.oid=indexes.indrelid) - JOIN pg_am m ON (m.oid=c.relam) - LEFT JOIN pg_attribute attribute ON (attribute.attrelid=indexes.indrelid + FROM pg_catalog.pg_index i) indexes + JOIN pg_catalog.pg_class c ON (c.oid=indexes.indexrelid) + JOIN pg_catalog.pg_class ct ON (ct.oid=indexes.indrelid) + JOIN pg_catalog.pg_am m ON (m.oid=c.relam) + LEFT JOIN pg_catalog.pg_attribute attribute ON (attribute.attrelid=indexes.indrelid AND attribute.attnum=indexes.key) - LEFT JOIN pg_collation collation_t ON (collation_t.oid=indexes.coll) - LEFT JOIN pg_namespace collation_namespace ON (collation_namespace.oid=collation_t.collnamespace) - LEFT JOIN pg_opclass opclass_t ON (opclass_t.oid=indexes.class) - LEFT JOIN pg_namespace opclas_namespace ON (opclas_namespace.oid=opclass_t.opcnamespace) - LEFT JOIN pg_tablespace tablespace_t ON (tablespace_t.oid = c.reltablespace)) s2 + LEFT JOIN pg_catalog.pg_collation collation_t ON (collation_t.oid=indexes.coll) + LEFT JOIN pg_catalog.pg_namespace collation_namespace ON (collation_namespace.oid=collation_t.collnamespace) + LEFT JOIN 
pg_catalog.pg_opclass opclass_t ON (opclass_t.oid=indexes.class) + LEFT JOIN pg_catalog.pg_namespace opclas_namespace ON (opclas_namespace.oid=opclass_t.opcnamespace) + LEFT JOIN pg_catalog.pg_tablespace tablespace_t ON (tablespace_t.oid = c.reltablespace)) s2 WHERE table_oid = $1 GROUP BY indexname, index_method, @@ -124,18 +126,48 @@ const queryConstants = { JOIN information_schema.columns ON (ordinal_position = column_position) WHERE table_name = pc_foreign_table.relname AND table_schema = foreign_table_namespace.nspname)::text[] AS foreign_columns, foreign_table_namespace.nspname AS foreign_table_schema - FROM pg_constraint AS pcon - LEFT JOIN pg_class AS pc ON pcon.conindid = pc.oid - LEFT JOIN pg_tablespace AS pt ON pc.reltablespace = pt.oid - LEFT JOIN pg_class AS pc_foreign_table ON (pcon.confrelid = pc_foreign_table.oid) - JOIN pg_namespace AS foreign_table_namespace ON (pc_foreign_table.relnamespace = foreign_table_namespace.oid) + FROM pg_catalog.pg_constraint AS pcon + LEFT JOIN pg_catalog.pg_class AS pc ON pcon.conindid = pc.oid + LEFT JOIN pg_catalog.pg_tablespace AS pt ON pc.reltablespace = pt.oid + LEFT JOIN pg_catalog.pg_class AS pc_foreign_table ON (pcon.confrelid = pc_foreign_table.oid) + JOIN pg_catalog.pg_namespace AS foreign_table_namespace ON (pc_foreign_table.relnamespace = foreign_table_namespace.oid) WHERE pcon.conrelid = $1 AND pcon.contype = 'f';`, GET_VIEW_DATA: `SELECT * FROM information_schema.views WHERE table_name = $1 AND table_schema = $2;`, GET_VIEW_OPTIONS: ` SELECT reloptions AS view_options, relpersistence AS persistence - FROM pg_class + FROM pg_catalog.pg_class WHERE relname = $1 AND relnamespace = $2;`, + GET_FUNCTIONS_WITH_PROCEDURES: ` + SELECT specific_name, + routine_name AS name, + routine_type, + routine_definition, + external_language, + security_type, + type_udt_name AS return_data_type + FROM information_schema.routines + WHERE specific_schema=$1;`, + GET_FUNCTIONS_WITH_PROCEDURES_ARGS: ` + SELECT parameter_name, + 
parameter_mode, + parameter_default, + data_type + FROM information_schema.parameters + WHERE specific_name = $1 + ORDER BY ordinal_position;`, + GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL: ` + SELECT obj_description(oid, 'pg_proc') AS description, + proname AS function_name, + provolatile AS volatility, + proparallel AS parallel, + proisstrict AS strict, + proretset AS returns_set, + proleakproof AS leak_proof, + procost AS estimated_cost, + prorows AS estimated_rows, + prokind AS kind + FROM pg_catalog.pg_proc WHERE pronamespace = $1;`, }; const getQueryName = query => { From 4fc8c328779760df2265942db8ef52033591026d Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 1 Oct 2021 17:10:51 +0300 Subject: [PATCH 16/69] Removed not used file --- reverse_engineering/helpers/packageDataHelper.js | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 reverse_engineering/helpers/packageDataHelper.js diff --git a/reverse_engineering/helpers/packageDataHelper.js b/reverse_engineering/helpers/packageDataHelper.js deleted file mode 100644 index e69de29..0000000 From 0d7457f3ffe2d21577654ba5f1b003458b0804c3 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 4 Oct 2021 13:25:44 +0300 Subject: [PATCH 17/69] Added configs for user-defined types, fixed issues with json type --- localization/en.json | 10 +- .../field_level/fieldLevelConfig.json | 1404 +++++++++++------ types/composite.json | 13 + types/enum.json | 21 +- types/json.json | 22 +- types/range_type.json | 11 + 6 files changed, 961 insertions(+), 520 deletions(-) create mode 100644 types/composite.json create mode 100644 types/range_type.json diff --git a/localization/en.json b/localization/en.json index 5790966..4d4e388 100644 --- a/localization/en.json +++ b/localization/en.json @@ -23,7 +23,7 @@ "OBJECT___BROWSER_NOT_IN_BUCKET": "Undefined Schema", "OBJECT___BROWSER_COLLECTION": "Tables", "OBJECT___BROWSER_VIEWS": "Views", - "OBJECT___BROWSER_DEFINITIONS": "Composite Types", + 
"OBJECT___BROWSER_DEFINITIONS": "User-Defined Types", "OBJECT___BROWSER_FIELDS": "Columns", "PROPERTIES_PANE___BUCKET_NAME": "Schema name", "PROPERTIES_PANE___VIEW_NAME": "View name", @@ -136,10 +136,10 @@ "COLLECTION_SCHEMA_DEFINITION_TYPE": "document", "MONGODB_SCRIPT_WARNING_MESSAGE": "This view is not associated to a type (viewOn property).", "TYPE": {}, - "CENTRAL_PANE___TAB_MODEL_DEFINITIONS": "Composite Types", - "CONTEXT_MENU___ADD_MODEL_REFERENCE": "Composite Type", - "CONTEXT_MENU___GO_TO_DEFINITION": "Go to Composite Type", - "DOCUMENTATION___DB_DEFINITIONS": "Composite Types", + "CENTRAL_PANE___TAB_MODEL_DEFINITIONS": "User-Defined Types", + "CONTEXT_MENU___ADD_MODEL_REFERENCE": "User-Defined Type", + "CONTEXT_MENU___GO_TO_DEFINITION": "Go to User-Defined Type", + "DOCUMENTATION___DB_DEFINITIONS": "User-Defined Types", "CONTEXT_MENU___CONVERT_TO_PATTERN_FIELD": "Convert to Pattern Column", "CONTEXT_MENU___CONVERT_PATTERN_TO_REGULAR_FIELD": "Convert to Regular Column" } \ No newline at end of file diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 8f062e3..8ac45c5 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -113,7 +113,6 @@ making sure that you maintain a proper JSON format. } */ - { "lowerTab": "JsonDetails", "structure": { @@ -191,7 +190,12 @@ making sure that you maintain a proper JSON format. "typeDecorator": true, "dependency": { "key": "mode", - "value": ["char","varchar", "bit", "varbit"] + "value": [ + "char", + "varchar", + "bit", + "varbit" + ] } }, { @@ -218,7 +222,11 @@ making sure that you maintain a proper JSON format. "propertyType": "text", "dependency": { "key": "mode", - "value": ["char","varchar", "text"] + "value": [ + "char", + "varchar", + "text" + ] } }, { @@ -256,23 +264,29 @@ making sure that you maintain a proper JSON format. 
"values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -292,23 +306,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -376,23 +396,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -412,23 +438,29 @@ making sure that you maintain a proper JSON format. 
}, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -662,7 +694,9 @@ making sure that you maintain a proper JSON format. "typeDecorator": true, "dependency": { "key": "mode", - "value": ["numeric"] + "value": [ + "numeric" + ] } }, { @@ -717,23 +751,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -753,23 +793,29 @@ making sure that you maintain a proper JSON format. 
}, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -837,23 +883,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -873,23 +925,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -1027,23 +1085,29 @@ making sure that you maintain a proper JSON format. 
"values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -1063,23 +1127,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -1147,23 +1217,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -1183,23 +1259,29 @@ making sure that you maintain a proper JSON format. 
}, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -1316,7 +1398,10 @@ making sure that you maintain a proper JSON format. "typeDecorator": true, "dependency": { "key": "mode", - "value": ["time","timestamp"] + "value": [ + "time", + "timestamp" + ] } }, { @@ -1325,7 +1410,10 @@ making sure that you maintain a proper JSON format. "propertyType": "checkbox", "dependency": { "key": "mode", - "value": ["time","timestamp"] + "value": [ + "time", + "timestamp" + ] } }, { @@ -1406,23 +1494,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -1442,23 +1536,29 @@ making sure that you maintain a proper JSON format. 
}, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -1526,23 +1626,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -1562,23 +1668,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -1768,23 +1880,29 @@ making sure that you maintain a proper JSON format. 
"values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -1804,23 +1922,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -1888,23 +2012,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -1924,23 +2054,29 @@ making sure that you maintain a proper JSON format. 
}, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -2011,41 +2147,6 @@ making sure that you maintain a proper JSON format. } ], "___0": [], - "enum": [ - "name", - "code", - "sampleName", - "schemaId", - "refPath", - "type", - { - "propertyName": "Comments", - "propertyKeyword": "description", - "propertyTooltip": "comments", - "addTimestampButton": false, - "propertyType": "details", - "template": "textarea" - }, - { - "propertyName": "Not null", - "propertyKeyword": "required", - "enableForReference": true, - "propertyType": "checkbox" - }, - "pattern", - "default", - "enum", - "sample", - { - "propertyName": "Remarks", - "propertyKeyword": "comments", - "shouldValidate": false, - "propertyTooltip": "remarks", - "addTimestampButton": true, - "propertyType": "details", - "template": "textarea" - } - ], "geometry": [ "name", "code", @@ -2181,23 +2282,29 @@ making sure that you maintain a proper JSON format. 
"values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -2217,23 +2324,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -2301,23 +2414,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -2337,23 +2456,29 @@ making sure that you maintain a proper JSON format. 
}, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -2475,23 +2600,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -2511,23 +2642,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -2595,23 +2732,29 @@ making sure that you maintain a proper JSON format. 
"values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -2631,23 +2774,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -2790,23 +2939,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -2826,23 +2981,29 @@ making sure that you maintain a proper JSON format. 
}, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -2910,23 +3071,29 @@ making sure that you maintain a proper JSON format. "values": [ { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] } @@ -2946,23 +3113,29 @@ making sure that you maintain a proper JSON format. }, { "type": "or", - "values": [{ - "key": "compositePrimaryKey", - "value": false - }, { - "key": "compositePrimaryKey", - "exist": false - }] + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] }, { "type": "or", - "values": [{ - "key": "compositeUniqueKey", - "value": false - }, { - "key": "compositeUniqueKey", - "exist": false - }] + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] } ] }, @@ -3100,10 +3273,6 @@ making sure that you maintain a proper JSON format. 
"propertyKeyword": "subtype", "propertyType": "select", "options": [ - { - "name": " ", - "value": "string" - }, { "name": "object", "value": "object" @@ -3111,6 +3280,22 @@ making sure that you maintain a proper JSON format. { "name": "array", "value": "array" + }, + { + "name": "string", + "value": "string" + }, + { + "name": "number", + "value": "number" + }, + { + "name": "boolean", + "value": "boolean" + }, + { + "name": "null", + "value": "null" } ], "defaultValue": "object" @@ -3157,6 +3342,235 @@ making sure that you maintain a proper JSON format. "propertyType": "details", "template": "textarea" } - ] + ], + "___1": [], + "composite": [ + "name", + "code", + "sampleName", + "schemaId", + "refPath", + "type", + { + "propertyName": "Array type", + "propertyType": "group", + "propertyKeyword": "array_type", + "propertyTooltip": "Declaring the array number of dimensions is simply for documentation; it does not affect run-time behavior.", + "enableForReference": true, + "structure": [ + { + "propertyName": "Size limit", + "propertyKeyword": "array_size_limit", + "propertyTooltip": "Declaring the array size is simply for documentation; it does not affect run-time behavior.", + "propertyType": "numeric", + "valueType": "number", + "minValue": 0, + "step": 1 + } + ] + }, + { + "propertyName": "Comments", + "propertyKeyword": "description", + "propertyTooltip": "comments", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Not null", + "propertyKeyword": "required", + "enableForReference": true, + "propertyType": "checkbox" + }, + "sample", + { + "propertyName": "Remarks", + "propertyKeyword": "comments", + "shouldValidate": false, + "propertyTooltip": "remarks", + "addTimestampButton": true, + "propertyType": "details", + "template": "textarea" + } + ], + "enum": [ + "name", + "code", + "sampleName", + "schemaId", + "refPath", + "type", + { + "propertyName": "Array type", + "propertyType": "group", + 
"propertyKeyword": "array_type", + "propertyTooltip": "Declaring the array number of dimensions is simply for documentation; it does not affect run-time behavior.", + "enableForReference": true, + "structure": [ + { + "propertyName": "Size limit", + "propertyKeyword": "array_size_limit", + "propertyTooltip": "Declaring the array size is simply for documentation; it does not affect run-time behavior.", + "propertyType": "numeric", + "valueType": "number", + "minValue": 0, + "step": 1 + } + ] + }, + { + "propertyName": "Comments", + "propertyKeyword": "description", + "propertyTooltip": "comments", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Not null", + "propertyKeyword": "required", + "enableForReference": true, + "propertyType": "checkbox" + }, + "pattern", + "default", + "enum", + "sample", + { + "propertyName": "Remarks", + "propertyKeyword": "comments", + "shouldValidate": false, + "propertyTooltip": "remarks", + "addTimestampButton": true, + "propertyType": "details", + "template": "textarea" + } + ], + "range_type": [ + "name", + "code", + "schemaId", + "type", + { + "propertyName": "Array type", + "propertyType": "group", + "propertyKeyword": "array_type", + "propertyTooltip": "Declaring the array number of dimensions is simply for documentation; it does not affect run-time behavior.", + "enableForReference": true, + "structure": [ + { + "propertyName": "Size limit", + "propertyKeyword": "array_size_limit", + "propertyTooltip": "Declaring the array size is simply for documentation; it does not affect run-time behavior.", + "propertyType": "numeric", + "valueType": "number", + "minValue": 0, + "step": 1 + } + ] + }, + { + "propertyName": "Not null", + "propertyKeyword": "required", + "enableForReference": true, + "propertyType": "checkbox" + }, + "default", + { + "propertyName": "Comments", + "propertyKeyword": "description", + "propertyTooltip": "comments", + "addTimestampButton": false, + 
"propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Subtype", + "propertyKeyword": "rangeSubtype", + "propertyTooltip": "The name of the element type that the range type will represent ranges of.", + "propertyType": "text", + "dependency": { + "level": "parent", + "key": "type", + "value": "definitions" + } + }, + { + "propertyName": "Operator class", + "propertyKeyword": "operatorClass", + "propertyTooltip": "The name of a b-tree operator class for the subtype.", + "propertyType": "text", + "dependency": { + "level": "parent", + "key": "type", + "value": "definitions" + } + }, + { + "propertyName": "Collation", + "propertyKeyword": "collation", + "propertyTooltip": "The name of an existing collation to be associated with a range type.", + "propertyType": "text", + "dependency": { + "level": "parent", + "key": "type", + "value": "definitions" + } + }, + { + "propertyName": "Canonical function", + "propertyKeyword": "canonicalFunction", + "propertyTooltip": "The name of the canonicalization function for the range type.", + "propertyType": "text", + "dependency": { + "level": "parent", + "key": "type", + "value": "definitions" + } + }, + { + "propertyName": "Subtype diff", + "propertyKeyword": "subtypeDiffFunction", + "propertyTooltip": "The name of a difference function for the subtype.", + "propertyType": "text", + "dependency": { + "level": "parent", + "key": "type", + "value": "definitions" + } + }, + { + "propertyName": "Multirange type", + "propertyKeyword": "multiRangeType", + "propertyTooltip": "The name of the corresponding multirange type.", + "propertyType": "text", + "dependency": { + "level": "parent", + "key": "type", + "value": "definitions" + } + }, + "unit", + "minimum", + "exclusiveMinimum", + "maximum", + "exclusiveMaximum", + "multipleOf", + "divisibleBy", + "pattern", + "enum", + "sample", + { + "propertyName": "Remarks", + "propertyKeyword": "comments", + "shouldValidate": false, + "propertyTooltip": "remarks", + 
"addTimestampButton": true, + "propertyType": "details", + "template": "textarea" + } + ], + "___2": [] } -} +} \ No newline at end of file diff --git a/types/composite.json b/types/composite.json new file mode 100644 index 0000000..f7a9a1b --- /dev/null +++ b/types/composite.json @@ -0,0 +1,13 @@ +{ + "name": "composite", + "erdAbbreviation": "", + "dtdAbbreviation": "{...}", + "parentType": "document", + "hiddenOnEntity": [ + "collection", + "view" + ], + "defaultValues": { + "properties": [] + } +} \ No newline at end of file diff --git a/types/enum.json b/types/enum.json index 81b51be..9398faf 100644 --- a/types/enum.json +++ b/types/enum.json @@ -3,20 +3,9 @@ "erdAbbreviation": "", "dtdAbbreviation": "{enum}", "parentType": "string", - "hiddenOnEntity": "view", - "defaultValues": { - "primaryKey": false, - "relationshipType": "", - "parentRelationship": "", - "childRelationships": [], - "foreignCollection": "", - "foreignField": [], - "default": "", - "minLength": "", - "maxLength": "", - "enum": [], - "sample":"", - "pattern": "", - "comments":"" - } + "hiddenOnEntity": [ + "collection", + "view" + ], + "defaultValues": {} } \ No newline at end of file diff --git a/types/json.json b/types/json.json index fcb70ec..66a0b5e 100644 --- a/types/json.json +++ b/types/json.json @@ -2,8 +2,13 @@ "name": "json", "erdAbbreviation": "", "dtdAbbreviation": "{...}", - "parentType": "string", + "parentType": "jsonObject", "hiddenOnEntity": "view", + "jsonType": { + "order": 1, + "name": "json", + "jsonRoot": true + }, "defaultValues": { "primaryKey": false, "relationshipType": "", @@ -11,10 +16,10 @@ "childRelationships": [], "foreignCollection": "", "foreignField": [], - "enabled": true, "additionalProperties": false, "mode": "json", - "subtype": "string" + "subtype": "object", + "properties": [] }, "subtypes": { "object": { @@ -40,7 +45,16 @@ ] }, "string": { - "parentType": "string" + "parentType": "jsonString" + }, + "number": { + "parentType": "jsonNumber" + }, + 
"boolean": { + "parentType": "jsonBoolean" + }, + "null": { + "parentType": "jsonNull" } } } \ No newline at end of file diff --git a/types/range_type.json b/types/range_type.json new file mode 100644 index 0000000..0843d1e --- /dev/null +++ b/types/range_type.json @@ -0,0 +1,11 @@ +{ + "name": "range_type", + "erdAbbreviation": "", + "dtdAbbreviation": "{123}", + "parentType": "string", + "hiddenOnEntity": [ + "collection", + "view" + ], + "defaultValues": {} +} \ No newline at end of file From 3d0061ab95040e823763411845acd112297db94f Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 4 Oct 2021 13:32:26 +0300 Subject: [PATCH 18/69] RE: fixed Re of json types --- reverse_engineering/helpers/postgresHelpers/columnHelper.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index af8e300..83e5593 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -185,11 +185,13 @@ const getParsedJsonValueType = value => { return 'array'; } - if (typeof value === 'object') { + const type = typeof value; + + if (type === 'undefined') { return 'object'; } - return ''; + return type; }; module.exports = { From ac1ef3851b92fd720b49fd079a6c67ee671768a7 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 4 Oct 2021 17:17:28 +0300 Subject: [PATCH 19/69] RE: added RE of user-defined types --- reverse_engineering/api.js | 77 +++++++++++-------- reverse_engineering/helpers/getJsonSchema.js | 12 ++- .../helpers/postgresHelpers/columnHelper.js | 40 +++++++--- .../postgresHelpers/userDefinedTypesHelper.js | 63 +++++++++++++++ .../helpers/postgresService.js | 31 +++++++- reverse_engineering/helpers/queryConstants.js | 51 +++++++++++- 6 files changed, 222 insertions(+), 52 deletions(-) create mode 100644 
reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 439a646..69e4fa7 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -93,47 +93,58 @@ module.exports = { const schemasNames = data.collectionData.dataBaseNames; const { packages, relationships } = await Promise.all( - schemasNames.map(async schemaName => ({ - schemaName, - ...(await postgresService.retrieveEntitiesData( + schemasNames.map(async schemaName => { + const { tables, views, modelDefinitions } = await postgresService.retrieveEntitiesData( schemaName, collections[schemaName], data.recordSamplingSettings - )), - ...(await postgresService.retrieveFunctionsWithProcedures(schemaName)), - })) + ); + const { functions, procedures } = await postgresService.retrieveFunctionsWithProcedures(schemaName); + + return { + schemaName, + tables, + views, + functions, + procedures, + modelDefinitions, + }; + }) ).then(schemaData => { const relationships = schemaData .flatMap(({ tables }) => tables.map(entityData => entityData.relationships)) .flat(); - const packages = schemaData.flatMap(({ schemaName, tables, views, functions, procedures }) => { - const bucketInfo = { - UDFs: functions, - Procedures: procedures, - }; - - const tablePackages = tables.map(entityData => ({ - dbName: schemaName, - collectionName: entityData.name, - documents: entityData.documents, - views: [], - emptyBucket: false, - entityLevel: entityData.entityLevel, - validation: { - jsonSchema: entityData.jsonSchema, - }, - bucketInfo, - })); - - const viewPackage = { - dbName: schemaName, - views: views, - emptyBucket: false, - }; - - return [...tablePackages, viewPackage]; - }); + const packages = schemaData.flatMap( + ({ schemaName, tables, views, functions, procedures, modelDefinitions }) => { + const bucketInfo = { + UDFs: functions, + Procedures: procedures, + }; + + const tablePackages = tables.map(entityData => ({ + dbName: 
schemaName, + collectionName: entityData.name, + documents: entityData.documents, + views: [], + emptyBucket: false, + entityLevel: entityData.entityLevel, + validation: { + jsonSchema: entityData.jsonSchema, + }, + bucketInfo, + modelDefinitions, + })); + + const viewPackage = { + dbName: schemaName, + views: views, + emptyBucket: false, + }; + + return [...tablePackages, viewPackage]; + } + ); return { packages, relationships }; }); diff --git a/reverse_engineering/helpers/getJsonSchema.js b/reverse_engineering/helpers/getJsonSchema.js index c464ed7..0669bb3 100644 --- a/reverse_engineering/helpers/getJsonSchema.js +++ b/reverse_engineering/helpers/getJsonSchema.js @@ -1,5 +1,15 @@ -const getJsonSchema = (columns) => { +const getJsonSchema = columns => { const properties = columns.reduce((properties, column) => { + if (column.properties) { + return { + ...properties, + [column.name]: { + ...column, + ...getJsonSchema(column.properties), + }, + }; + } + return { ...properties, [column.name]: column, diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index 83e5593..a4c6092 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -13,6 +13,7 @@ const columnPropertiesMapper = { NO: true, }, }, + not_null: 'required', data_type: 'type', numeric_precision: 'precision', numeric_scale: 'scale', @@ -23,10 +24,9 @@ const columnPropertiesMapper = { number_of_array_dimensions: 'numberOfArrayDimensions', udt_name: 'udt_name', character_maximum_length: 'length', - description: 'description', }; -const mapColumnData = column => { +const mapColumnData = userDefinedTypes => column => { return _.chain(column) .toPairs() .map(([key, value]) => [ @@ -35,22 +35,29 @@ const mapColumnData = column => { ]) .filter(([key, value]) => key && !_.isNil(value)) .fromPairs() - .thru(setColumnType) + 
.thru(setColumnType(userDefinedTypes)) .value(); }; -const setColumnType = column => ({ +const setColumnType = userDefinedTypes => column => ({ ...column, - ...mapType(column.type), - ...getArrayType(column), + ...getType(userDefinedTypes, column), }); -const getArrayType = column => { - if (column.type !== 'ARRAY') { - return {}; +const getType = (userDefinedTypes, column) => { + if (column.type === 'ARRAY') { + return getArrayType(userDefinedTypes, column); + } + + if (column.type === 'USER-DEFINED') { + return mapType(userDefinedTypes, column.udt_name); } - const typeData = mapType(column.udt_name.slice(1)); + return mapType(userDefinedTypes, column.type); +}; + +const getArrayType = (userDefinedTypes, column) => { + const typeData = mapType(userDefinedTypes, column.udt_name.slice(1)); return { ...typeData, @@ -58,7 +65,7 @@ const getArrayType = column => { }; }; -const mapType = type => { +const mapType = (userDefinedTypes, type) => { switch (type) { case 'bigint': case 'bigserial': @@ -116,9 +123,13 @@ const mapType = type => { case 'timestamptz': case 'timestamp with time zone': return { type: 'datetime', mode: 'timestamp', with_timezone: true }; + case 'timestamp without time zone': + return { type: 'datetime', mode: 'timestamp' }; case 'timetz': case 'time with time zone': return { type: 'datetime', mode: 'time', with_timezone: true }; + case 'time without time zone': + return { type: 'datetime', mode: 'time' }; case 'json': case 'jsonb': return { type: 'json', mode: type, subtype: 'object' }; @@ -148,8 +159,13 @@ const mapType = type => { case 'regrole': case 'regtype': return { type: 'oid', mode: type }; - default: + default: { + if (_.some(userDefinedTypes, { name: type })) { + return { $ref: `#/definitions/${type}` }; + } + return { type: 'char', mode: 'varchar' }; + } } }; diff --git a/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js new file mode 100644 index 
0000000..9d8ec06 --- /dev/null +++ b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js @@ -0,0 +1,63 @@ +const { mapColumnData } = require('./columnHelper'); + +let _ = null; + +const setDependencies = app => { + _ = app.require('lodash'); +}; + +const getUserDefinedTypes = udtResponse => { + return _.chain(udtResponse) + .map(typeData => { + switch (typeData.type) { + case 'e': + return getEnumType(typeData); + case 'r': + return getRangeType(typeData); + case 'c': + return getCompositeType(typeData); + default: + return null; + } + }) + .compact() + .value(); +}; + +const getEnumType = typeData => { + return { + name: typeData.name, + type: 'enum', + enum: typeData.enum_values || [], + }; +}; + +const getRangeType = typeData => { + return { + name: typeData.name, + type: 'range_type', + rangeSubtype: typeData.range_subtype || '', + operatorClass: typeData.range_opclass_name || '', + collation: typeData.range_collation_name || '', + canonicalFunction: typeData.range_canonical_proc || '', + subtypeDiffFunction: typeData.range_diff_proc || '', + }; +}; + +const getCompositeType = typeData => { + const columns = _.map(typeData.columns, mapColumnData([])); + + return { + name: typeData.name, + type: 'composite', + properties: columns, + }; +}; + +const isTypeComposite = typeData => typeData.type === 'c'; + +module.exports = { + setDependencies, + getUserDefinedTypes, + isTypeComposite, +}; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index a33b8d2..5dd43e3 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -28,6 +28,11 @@ const { prepareTableLevelData, prepareTableIndexes, } = require('./postgresHelpers/tableHelper'); +const { + setDependencies: setDependenciesInUserDefinedTypesHelper, + getUserDefinedTypes, + isTypeComposite, +} = require('./postgresHelpers/userDefinedTypesHelper'); const { setDependencies: 
setViewDependenciesInViewHelper, isViewByTableType, @@ -52,6 +57,7 @@ module.exports = { setDependenciesInForeignKeysHelper(app); setViewDependenciesInViewHelper(app); setFunctionHelperDependencies(app); + setDependenciesInUserDefinedTypesHelper(app); }, async connect(connectionInfo, specificLogger) { @@ -107,6 +113,7 @@ module.exports = { }, async retrieveEntitiesData(schemaName, entitiesNames, recordSamplingSettings) { + const userDefinedTypes = await this._retrieveUserDefinedTypes(schemaName); const schemaOidResult = await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true); const schemaOid = schemaOidResult?.oid; @@ -114,12 +121,12 @@ module.exports = { const tables = await mapPromises( tablesNames, - _.bind(this._retrieveSingleTableData, this, recordSamplingSettings, schemaOid, schemaName) + _.bind(this._retrieveSingleTableData, this, recordSamplingSettings, schemaOid, schemaName, userDefinedTypes) ); const views = await mapPromises(viewsNames, _.bind(this._retrieveSingleViewData, this, schemaOid, schemaName)); - return { views, tables }; + return { views, tables, modelDefinitions: getJsonSchema(userDefinedTypes) }; }, async retrieveFunctionsWithProcedures(schemaName) { @@ -154,7 +161,23 @@ module.exports = { return { functions: userDefinedFunctions, procedures: userDefinedProcedures }; }, - async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, tableName) { + async _retrieveUserDefinedTypes(schemaName) { + const userDefinedTypes = await db.queryTolerant(queryConstants.GET_USER_DEFINED_TYPES, [schemaName]); + const udtsWithColumns = await mapPromises(userDefinedTypes, async typeData => { + if (isTypeComposite(typeData)) { + return { + ...typeData, + columns: await db.queryTolerant(queryConstants.GET_COMPOSITE_TYPE_COLUMNS, [typeData.pg_class_oid]), + }; + } + + return typeData; + }); + + return getUserDefinedTypes(udtsWithColumns); + }, + + async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, 
userDefinedTypes, tableName) { const tableLevelData = await db.queryTolerant( queryConstants.GET_TABLE_LEVEL_DATA, [tableName, schemaOid], @@ -189,7 +212,7 @@ module.exports = { const entityLevel = clearEmptyPropertiesInObject(tableData); - let targetAttributes = tableColumns.map(mapColumnData); + let targetAttributes = tableColumns.map(mapColumnData(userDefinedTypes)); const hasJsonTypes = checkHaveJsonTypes(targetAttributes); let documents = []; diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index ce8f7f8..07d27a1 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -26,8 +26,7 @@ const queryConstants = { ORDER BY ordinal_position`, GET_TABLE_COLUMNS_ADDITIONAL_DATA: ` SELECT attname AS name, - attndims AS number_of_array_dimensions, - obj_description(attrelid, 'pg_class') AS description + attndims AS number_of_array_dimensions FROM pg_catalog.pg_attribute WHERE attrelid = $1;`, GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`, @@ -168,6 +167,54 @@ const queryConstants = { prorows AS estimated_rows, prokind AS kind FROM pg_catalog.pg_proc WHERE pronamespace = $1;`, + GET_USER_DEFINED_TYPES: ` + SELECT pg_type.typrelid AS pg_class_oid, + pg_type.typname AS name, + pg_type.typtype AS type, + pg_catalog.array_agg(pg_enum.enumlabel)::text[] AS enum_values, + range_subtype_type.typname AS range_subtype, + range_collation.collname AS range_collation_name, + range_opclass.opcname AS range_opclass_name, + range_canonical_proc.proname AS range_canonical_proc, + range_diff_proc.proname AS range_diff_proc + FROM pg_catalog.pg_type AS pg_type + LEFT JOIN pg_catalog.pg_class AS pg_class ON (pg_class.oid = pg_type.typrelid) + LEFT JOIN pg_catalog.pg_namespace AS pg_namespace ON (pg_namespace.oid = pg_type.typnamespace) + LEFT JOIN pg_catalog.pg_enum AS pg_enum ON (pg_enum.enumtypid = pg_type.oid) + LEFT JOIN pg_catalog.pg_range AS pg_range ON 
(pg_range.rngtypid = pg_type.oid) + LEFT JOIN pg_catalog.pg_type AS range_subtype_type ON (range_subtype_type.oid = pg_range.rngsubtype) + LEFT JOIN pg_catalog.pg_collation AS range_collation ON (range_collation.oid = pg_range.rngcollation) + LEFT JOIN pg_catalog.pg_opclass AS range_opclass ON (range_opclass.oid = pg_range.rngsubopc) + LEFT JOIN pg_catalog.pg_proc AS range_canonical_proc ON (range_canonical_proc.oid = pg_range.rngcanonical) + LEFT JOIN pg_catalog.pg_proc AS range_diff_proc ON (range_diff_proc.oid = pg_range.rngsubdiff) + WHERE pg_namespace.nspname = $1 + AND ((pg_type.typtype = 'c' + AND pg_class.relkind = 'c') + OR pg_type.typtype = 'e' + OR pg_type.typtype = 'r') + GROUP BY pg_class_oid, + pg_type.typname, + pg_type.typtype, + pg_class.oid, + range_subtype, + range_collation_name, + range_opclass_name, + range_canonical_proc, + range_diff_proc;`, + GET_COMPOSITE_TYPE_COLUMNS: ` + SELECT pg_attribute.attname AS column_name, + pg_type.typname AS data_type, + pg_get_expr(pg_attrdef.adbin, pg_attrdef.adrelid) AS columns_default, + pg_attribute.attnotnull AS not_null, + pg_collation.collname AS collation_name, + pg_attribute.attndims AS number_of_array_dimensions, + pg_attribute.atttypmod AS character_maximum_length + FROM pg_catalog.pg_attribute AS pg_attribute + LEFT JOIN pg_catalog.pg_type AS pg_type ON (pg_type.oid = pg_attribute.atttypid) + LEFT JOIN pg_catalog.pg_attrdef AS pg_attrdef ON (pg_attrdef.adrelid = pg_attribute.attrelid + AND pg_attrdef.adnum = pg_attribute.attnum) + LEFT JOIN pg_catalog.pg_collation AS pg_collation ON (pg_collation.oid = pg_attribute.attcollation) + WHERE pg_attribute.attrelid = $1`, }; const getQueryName = query => { From 3de90e058306b89a1262007a7af1416f147a3ac0 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 4 Oct 2021 17:38:38 +0300 Subject: [PATCH 20/69] RE: fixed issue with redundant table --- reverse_engineering/api.js | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git 
a/reverse_engineering/api.js b/reverse_engineering/api.js index 69e4fa7..3e563b8 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -136,13 +136,17 @@ module.exports = { modelDefinitions, })); - const viewPackage = { - dbName: schemaName, - views: views, - emptyBucket: false, - }; + if (views?.length) { + const viewPackage = { + dbName: schemaName, + views: views, + emptyBucket: false, + }; + + return [...tablePackages, viewPackage]; + } - return [...tablePackages, viewPackage]; + return tablePackages; } ); return { packages, relationships }; From 190044ce2060689acb5b1e744b67f140233875da Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 4 Oct 2021 17:43:51 +0300 Subject: [PATCH 21/69] Added tooltips --- properties_pane/entity_level/entityLevelConfig.json | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 0d77de2..e90d0a1 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -828,7 +828,8 @@ making sure that you maintain a proper JSON format. "options": [ "ASC", "DESC" - ] + ], + "propertyTooltip": "Specifies sort order" }, "nullsOrder": { "propertyType": "select", @@ -836,15 +837,18 @@ making sure that you maintain a proper JSON format. "", "NULLS FIRST", "NULLS LAST" - ] + ], + "propertyTooltip": "Specifies that nulls sort order" }, "collation": { "propertyType": "text", - "placeholder": "Collation" + "placeholder": "Collation", + "propertyTooltip": "The name of the collation to use for the index." }, "opclass": { "propertyType": "text", - "placeholder": "Opclass" + "placeholder": "Opclass", + "propertyTooltip": "The name of an operator class." 
} } }, From 0f61cc2400f4bb23707471374574394af4dd6301 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 4 Oct 2021 18:09:10 +0300 Subject: [PATCH 22/69] Fixed localization --- reverse_engineering/config.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/config.json b/reverse_engineering/config.json index bfe4e9f..f9687fc 100644 --- a/reverse_engineering/config.json +++ b/reverse_engineering/config.json @@ -1,7 +1,7 @@ { "errors": { - "NO_DATABASES": "There is no databases in SQL Server instance", - "WRONG_CONNECTION": "Can not connect to SQL Server instance" + "NO_DATABASES": "There is no databases in PostgreSQL Server instance", + "WRONG_CONNECTION": "Can not connect to PostgreSQL Server instance" }, "defaultDdlType": "mariadb", "excludeDocKind": ["id"], From 36076e8f525ef57a8412d1f691d44ad8265ad42d Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 4 Oct 2021 18:11:23 +0300 Subject: [PATCH 23/69] RE: fixed query tolerance to fails --- reverse_engineering/helpers/postgresHelpers/viewHelper.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/helpers/postgresHelpers/viewHelper.js b/reverse_engineering/helpers/postgresHelpers/viewHelper.js index 11cc1f3..143d7ce 100644 --- a/reverse_engineering/helpers/postgresHelpers/viewHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/viewHelper.js @@ -21,8 +21,8 @@ const prepareViewData = (viewData, viewOptions) => { const data = { withCheckOption: Boolean(viewData.check_option), checkTestingScope: getCheckTestingScope(viewData.check_option), - view_option: _.join(viewOptions.view_options, ','), - temporary: viewOptions.persistence === 't', + view_option: _.join(viewOptions?.view_options, ','), + temporary: viewOptions?.persistence === 't', recursive: isViewRecursive(viewData), }; From 9f14d87f8696fef8eed6dca6aaccdbff5a4065e3 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 5 Oct 2021 16:15:02 +0300 Subject: [PATCH 
24/69] RE: added schema name to inherits --- reverse_engineering/helpers/postgresService.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 5dd43e3..8106313 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -196,7 +196,7 @@ module.exports = { const partitioning = prepareTablePartition(partitionResult, tableColumns); const tableLevelProperties = prepareTableLevelData(tableLevelData); const description = getDescriptionFromResult(descriptionResult); - const inherits = inheritsResult?.parent_table_name; + const inherits = inheritsResult?.parent_table_name ? [schemaName, inheritsResult?.parent_table_name] : null; const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableColumns); const tableIndexes = prepareTableIndexes(tableIndexesResult); const relationships = prepareForeignKeys(tableForeignKeys, tableName, schemaName, tableColumns); From b287e6fcafd670771dee46c090670c6071bf4d87 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 5 Oct 2021 16:29:08 +0300 Subject: [PATCH 25/69] RE: added progress logs --- reverse_engineering/api.js | 2 ++ reverse_engineering/helpers/postgresService.js | 12 ++++++++++++ 2 files changed, 14 insertions(+) diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 3e563b8..ef3a9f9 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -101,6 +101,8 @@ module.exports = { ); const { functions, procedures } = await postgresService.retrieveFunctionsWithProcedures(schemaName); + postgresLogger.progress('Schema reversed successfully', schemaName); + return { schemaName, tables, diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 8106313..c40161a 100644 --- a/reverse_engineering/helpers/postgresService.js +++ 
b/reverse_engineering/helpers/postgresService.js @@ -130,6 +130,8 @@ module.exports = { }, async retrieveFunctionsWithProcedures(schemaName) { + logger.progress('Get Functions and Procedures', schemaName); + const schemaOid = (await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true))?.oid; const functionsWithProcedures = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES, [ @@ -162,6 +164,8 @@ module.exports = { }, async _retrieveUserDefinedTypes(schemaName) { + logger.progress('Get User-Defined Types', schemaName); + const userDefinedTypes = await db.queryTolerant(queryConstants.GET_USER_DEFINED_TYPES, [schemaName]); const udtsWithColumns = await mapPromises(userDefinedTypes, async typeData => { if (isTypeComposite(typeData)) { @@ -178,6 +182,8 @@ module.exports = { }, async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, userDefinedTypes, tableName) { + logger.progress('Get table data', schemaName, tableName); + const tableLevelData = await db.queryTolerant( queryConstants.GET_TABLE_LEVEL_DATA, [tableName, schemaOid], @@ -232,6 +238,8 @@ module.exports = { }, async _getTableColumns(tableName, schemaName, tableOid) { + logger.progress('Get columns', schemaName, tableName); + const tableColumns = await db.query(queryConstants.GET_TABLE_COLUMNS, [tableName, schemaName]); const tableColumnsAdditionalData = await db.queryTolerant(queryConstants.GET_TABLE_COLUMNS_ADDITIONAL_DATA, [ tableOid, @@ -246,6 +254,8 @@ module.exports = { }, async _getDocuments(schemaName, tableName, recordSamplingSettings) { + logger.progress('Sampling table', schemaName, tableName); + const fullTableName = `${schemaName}.${tableName}`; const quantity = (await db.queryTolerant(queryConstants.GET_ROWS_COUNT(fullTableName), [], true))?.quantity || 0; @@ -255,6 +265,8 @@ module.exports = { }, async _retrieveSingleViewData(schemaOid, schemaName, viewName) { + logger.progress('Get view data', schemaName, viewName); + viewName = 
removeViewNameSuffix(viewName); const viewData = await db.query(queryConstants.GET_VIEW_DATA, [viewName, schemaName], true); From 1ec4b83e62562438f0949198842f8c1ba6a75d73 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 5 Oct 2021 17:50:57 +0300 Subject: [PATCH 26/69] RE: added Re of database properties --- properties_pane/model_level/modelLevelConfig.json | 6 ++---- reverse_engineering/api.js | 4 +++- reverse_engineering/helpers/postgresService.js | 15 +++++++++++++++ reverse_engineering/helpers/queryConstants.js | 3 +++ 4 files changed, 23 insertions(+), 5 deletions(-) diff --git a/properties_pane/model_level/modelLevelConfig.json b/properties_pane/model_level/modelLevelConfig.json index 74b82b9..e902a23 100644 --- a/properties_pane/model_level/modelLevelConfig.json +++ b/properties_pane/model_level/modelLevelConfig.json @@ -155,15 +155,13 @@ making sure that you maintain a proper JSON format. { "propertyName": "Collation", "propertyKeyword": "LC_COLLATE", - "propertyTooltip": "Select from list of options", - "defaultValue": "English_United States.1252", + "propertyTooltip": "Collation order (LC_COLLATE) to use in the new database. ", "propertyType": "text" }, { "propertyName": "Character type", "propertyKeyword": "LC_CTYPE", - "propertyTooltip": "Select from list of options", - "defaultValue": "English_United States.1252", + "propertyTooltip": "Character classification (LC_CTYPE) to use in the new database. 
", "propertyType": "text" }, { diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index ef3a9f9..5437b08 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -92,6 +92,8 @@ module.exports = { const collections = data.collectionData.collections; const schemasNames = data.collectionData.dataBaseNames; + const modelData = await postgresService.getDbLevelData(); + const { packages, relationships } = await Promise.all( schemasNames.map(async schemaName => { const { tables, views, modelDefinitions } = await postgresService.retrieveEntitiesData( @@ -154,7 +156,7 @@ module.exports = { return { packages, relationships }; }); - callback(null, packages, null, relationships); + callback(null, packages, modelData, relationships); } catch (error) { logger.log('error', prepareError(error), 'Retrieve tables data'); callback(prepareError(error)); diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index c40161a..32675d6 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -112,6 +112,21 @@ module.exports = { }); }, + async getDbLevelData() { + logger.progress('Get database data'); + + const database_name = (await db.queryTolerant(queryConstants.GET_DB_NAME, [], true))?.current_database; + const encoding = (await db.queryTolerant(queryConstants.GET_DB_ENCODING, [], true))?.server_encoding; + const LC_COLLATE = (await db.queryTolerant(queryConstants.GET_DB_COLLATE_NAME, [], true))?.default_collate_name; + + return { + database_name, + encoding, + LC_COLLATE, + LC_CTYPE: LC_COLLATE, + }; + }, + async retrieveEntitiesData(schemaName, entitiesNames, recordSamplingSettings) { const userDefinedTypes = await this._retrieveUserDefinedTypes(schemaName); const schemaOidResult = await db.queryTolerant(queryConstants.GET_NAMESPACE_OID, [schemaName], true); diff --git a/reverse_engineering/helpers/queryConstants.js 
b/reverse_engineering/helpers/queryConstants.js index 07d27a1..e12dd8f 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -215,6 +215,9 @@ const queryConstants = { AND pg_attrdef.adnum = pg_attribute.attnum) LEFT JOIN pg_catalog.pg_collation AS pg_collation ON (pg_collation.oid = pg_attribute.attcollation) WHERE pg_attribute.attrelid = $1`, + GET_DB_NAME: 'SELECT current_database();', + GET_DB_ENCODING: 'SHOW SERVER_ENCODING;', + GET_DB_COLLATE_NAME: 'SELECT default_collate_name FROM information_schema.character_sets;' }; const getQueryName = query => { From 761af4170660336d72b755e0e16232d2df12c0ef Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 8 Oct 2021 11:52:55 +0300 Subject: [PATCH 27/69] Fixed configs --- .../container_level/containerLevelConfig.json | 45 +++++++------------ .../entity_level/entityLevelConfig.json | 2 +- .../helpers/postgresHelpers/functionHelper.js | 1 - 3 files changed, 17 insertions(+), 31 deletions(-) diff --git a/properties_pane/container_level/containerLevelConfig.json b/properties_pane/container_level/containerLevelConfig.json index d644aea..20c54b7 100644 --- a/properties_pane/container_level/containerLevelConfig.json +++ b/properties_pane/container_level/containerLevelConfig.json @@ -192,21 +192,10 @@ making sure that you maintain a proper JSON format. 
"propertyType": "text" }, { - "propertyName": "Arg type default", - "propertyKeyword": "argumentTypeDefault", - "propertyType": "checkbox", - "defaultValue": true, - "propertyTooltip": "Uncheck if you need to specify another type" - }, - { - "propertyName": "Arg type expression", + "propertyName": "Default expression", "propertyKeyword": "defaultExpression", "propertyTooltip": "The argument types can be base, composite, or domain types, or can reference the type of a table column.", - "propertyType": "text", - "dependency": { - "key": "argumentTypeDefault", - "value": false - } + "propertyType": "text" } ] }, @@ -222,8 +211,17 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Any valid PostgreSQL data type", "propertyType": "text", "dependency": { - "key": "setOf", - "value": false + "type": "or", + "values": [ + { + "key": "functionReturnsSetOf", + "value": false + }, + { + "key": "functionReturnsSetOf", + "exists": false + } + ] } }, { @@ -253,7 +251,7 @@ making sure that you maintain a proper JSON format. "propertyType": "checkbox", "propertyTooltip": "Indicates that the function is a window function rather than a plain function. This is currently only useful for functions written in C.", "dependency": { - "key": "lang_name", + "key": "functionLanguage", "value": "c" } }, @@ -329,7 +327,7 @@ making sure that you maintain a proper JSON format. "step": 1, "propertyTooltip": "A positive number giving the estimated number of rows that the planner should expect the function to return.", "dependency": { - "key": "setOf", + "key": "functionReturnsSetOf", "value": true } }, @@ -432,22 +430,11 @@ making sure that you maintain a proper JSON format. 
"propertyTooltip": "The type of argument.", "propertyType": "text" }, - { - "propertyName": "Arg type default", - "propertyKeyword": "argumentTypeDefault", - "propertyType": "checkbox", - "defaultValue": true, - "propertyTooltip": "Uncheck if you need to specify another type" - }, { "propertyName": "Arg type expression", "propertyKeyword": "defaultExpression", "propertyTooltip": "The argument types can be base, composite, or domain types, or can reference the type of a table column.", - "propertyType": "text", - "dependency": { - "key": "argumentTypeDefault", - "value": false - } + "propertyType": "text" } ] }, diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index e90d0a1..f36c4ff 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -207,7 +207,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Using method", - "propertyKeyword": "method", + "propertyKeyword": "usingMethod", "propertyTooltip": "Optional clause to specify the table access method to use to store the contents for the new table; the method needs be an access method of type TABLE.", "propertyType": "text" }, diff --git a/reverse_engineering/helpers/postgresHelpers/functionHelper.js b/reverse_engineering/helpers/postgresHelpers/functionHelper.js index f71dab5..47faefb 100644 --- a/reverse_engineering/helpers/postgresHelpers/functionHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/functionHelper.js @@ -9,7 +9,6 @@ const mapFunctionArgs = args => { argumentMode: arg.parameter_mode, argumentName: arg.parameter_name, argumentType: arg.data_type, - argumentTypeDefault: !Boolean(arg.parameter_default), defaultExpression: arg.parameter_default, })); }; From 87fd74336643b8d763c91bbd5af097c822df063c Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 8 Oct 2021 15:55:46 +0300 Subject: [PATCH 28/69] Added support for multirange 
type, renamed range_type to range_udt --- .../field_level/fieldLevelConfig.json | 340 +++++++++++++++++- .../helpers/postgresHelpers/columnHelper.js | 7 + .../postgresHelpers/userDefinedTypesHelper.js | 2 +- types/datetime.json | 59 +-- types/multirange.json | 37 ++ types/object.json | 3 +- types/{range_type.json => range_udt.json} | 2 +- 7 files changed, 418 insertions(+), 32 deletions(-) create mode 100644 types/multirange.json rename types/{range_type.json => range_udt.json} (87%) diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 8ac45c5..0706d5a 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -2146,6 +2146,344 @@ making sure that you maintain a proper JSON format. "template": "textarea" } ], + "multirange": [ + "name", + "code", + "schemaId", + "type", + { + "propertyName": "Subtype", + "propertyKeyword": "mode", + "propertyType": "select", + "options": [ + "int4multirange", + "int8multirange", + "nummultirange", + "datemultirange", + "tsmultirange", + "tstzmultirange" + ], + "data": "options", + "valueType": "string" + }, + { + "propertyName": "Array type", + "propertyType": "group", + "propertyKeyword": "array_type", + "propertyTooltip": "Declaring the array number of dimensions is simply for documentation; it does not affect run-time behavior.", + "structure": [ + { + "propertyName": "Size limit", + "propertyKeyword": "array_size_limit", + "propertyTooltip": "Declaring the array size is simply for documentation; it does not affect run-time behavior.", + "propertyType": "numeric", + "valueType": "number", + "minValue": 0, + "step": 1 + } + ] + }, + { + "propertyName": "Not null", + "propertyKeyword": "required", + "enableForReference": true, + "propertyType": "checkbox" + }, + "default", + { + "propertyName": "Comments", + "propertyKeyword": "description", + "propertyTooltip": "comments", + "addTimestampButton": 
false, + "propertyType": "details", + "template": "textarea" + }, + { + "propertyName": "Primary key", + "propertyKeyword": "compositePrimaryKey", + "propertyType": "checkbox", + "dependency": { + "key": "compositePrimaryKey", + "value": true + }, + "disabled": true + }, + { + "propertyName": "Primary key", + "propertyKeyword": "primaryKey", + "enableForReference": true, + "propertyType": "checkbox", + "dependency": { + "type": "and", + "values": [ + { + "type": "or", + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] + }, + { + "type": "or", + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] + } + ] + } + }, + { + "propertyName": "Primary key options", + "propertyType": "block", + "propertyKeyword": "primaryKeyOptions", + "enableForReference": true, + "propertyTooltip": "Primary key options", + "dependency": { + "type": "and", + "values": [ + { + "key": "primaryKey", + "value": true + }, + { + "type": "or", + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] + }, + { + "type": "or", + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] + } + ] + }, + "structure": [ + { + "propertyName": "Constraint name", + "propertyKeyword": "constraintName", + "propertyTooltip": "", + "propertyType": "text" + }, + { + "propertyName": "Category", + "propertyKeyword": "indexCategory", + "propertyTooltip": "", + "propertyType": "select", + "defaultValue": "", + "options": [ + "", + "btree", + "hash", + "gist", + "spgist", + "gin", + "brin" + ] + }, + { + "propertyName": "Key order", + "propertyKeyword": "indexOrder", + "propertyTooltip": "", + "propertyType": "select", + "options": [ + "", + "ASC", + "DESC" + ] + }, + { + "propertyName": "Comment", + 
"propertyKeyword": "indexComment", + "propertyTooltip": "comment", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" + } + ] + }, + { + "propertyName": "Unique", + "propertyKeyword": "compositeUniqueKey", + "propertyType": "checkbox", + "dependency": { + "key": "compositeUniqueKey", + "value": true + }, + "disabled": true + }, + { + "propertyName": "Unique", + "propertyKeyword": "unique", + "enableForReference": true, + "propertyType": "checkbox", + "dependency": { + "type": "and", + "values": [ + { + "type": "or", + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] + }, + { + "type": "or", + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] + } + ] + } + }, + { + "propertyName": "Unique key options", + "propertyType": "group", + "propertyKeyword": "uniqueKeyOptions", + "enableForReference": true, + "propertyTooltip": "Unique key options", + "dependency": { + "type": "and", + "values": [ + { + "key": "unique", + "value": true + }, + { + "type": "or", + "values": [ + { + "key": "compositePrimaryKey", + "value": false + }, + { + "key": "compositePrimaryKey", + "exist": false + } + ] + }, + { + "type": "or", + "values": [ + { + "key": "compositeUniqueKey", + "value": false + }, + { + "key": "compositeUniqueKey", + "exist": false + } + ] + } + ] + }, + "structure": [ + { + "propertyName": "Constraint name", + "propertyKeyword": "constraintName", + "propertyType": "text" + }, + { + "propertyName": "Category", + "propertyKeyword": "indexCategory", + "propertyTooltip": "", + "propertyType": "select", + "defaultValue": "", + "options": [ + "", + "btree", + "hash", + "gist", + "spgist", + "gin", + "brin" + ] + }, + { + "propertyName": "Key order", + "propertyKeyword": "indexOrder", + "propertyTooltip": "", + "propertyType": "select", + "options": [ + "", + "ASC", + "DESC" + ] + }, + 
{ + "propertyName": "Comment", + "propertyKeyword": "indexComment", + "propertyTooltip": "comment", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" + } + ] + }, + "foreignCollection", + "foreignField", + "relationshipType", + "unit", + "minimum", + "exclusiveMinimum", + "maximum", + "exclusiveMaximum", + "multipleOf", + "divisibleBy", + "pattern", + "enum", + "sample", + { + "propertyName": "Remarks", + "propertyKeyword": "comments", + "shouldValidate": false, + "propertyTooltip": "remarks", + "addTimestampButton": true, + "propertyType": "details", + "template": "textarea" + } + ], "___0": [], "geometry": [ "name", @@ -3447,7 +3785,7 @@ making sure that you maintain a proper JSON format. "template": "textarea" } ], - "range_type": [ + "range_udt": [ "name", "code", "schemaId", diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index a4c6092..2ffbde9 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -140,6 +140,13 @@ const mapType = (userDefinedTypes, type) => { case 'tsrange': case 'tstzrange': return { type: 'range', mode: type }; + case 'int4multirange': + case 'int8multirange': + case 'nummultirange': + case 'tsmultirange': + case 'tstzmultirange': + case 'datemultirange': + return { type: 'multirange', mode: type }; case 'uuid': case 'xml': case 'boolean': diff --git a/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js index 9d8ec06..1cf853b 100644 --- a/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js @@ -35,7 +35,7 @@ const getEnumType = typeData => { const getRangeType = typeData => { return { name: typeData.name, - type: 'range_type', + type: 'range_udt', 
rangeSubtype: typeData.range_subtype || '', operatorClass: typeData.range_opclass_name || '', collation: typeData.range_collation_name || '', diff --git a/types/datetime.json b/types/datetime.json index af0e1ad..dd70d5e 100644 --- a/types/datetime.json +++ b/types/datetime.json @@ -10,40 +10,43 @@ "primaryKey": false, "default": "", "enum": [], - "sample":"", - "comments":"", - "pattern":"", "format": "", "mode": "date", "scale": 7 }, - "descriptor": [{ - "schema": { - "mode": "date" + "descriptor": [ + { + "schema": { + "mode": "date" + }, + "format": "YYYY-MM-DD" }, - "format": "YYYY-MM-DD" - }, { - "schema": { - "mode": "time" + { + "schema": { + "mode": "time" + }, + "format": "hh:mm:ss" }, - "format": "hh:mm:ss" - }, { - "schema": { - "mode": "time", - "fractSecPrecision": 3 + { + "schema": { + "mode": "time", + "fractSecPrecision": 3 + }, + "format": "hh:mm:ss.nnn" }, - "format": "hh:mm:ss.nnn" - }, { - "schema": { - "mode": "time", - "fractSecPrecision": 6 + { + "schema": { + "mode": "time", + "fractSecPrecision": 6 + }, + "format": "hh:mm:ss.nnnnnn" }, - "format": "hh:mm:ss.nnnnnn" - }, { - "schema": { - "mode": "time", - "fractSecPrecision": 9 - }, - "format": "hh:mm:ss.nnnnnnnnn" - }] + { + "schema": { + "mode": "time", + "fractSecPrecision": 9 + }, + "format": "hh:mm:ss.nnnnnnnnn" + } + ] } \ No newline at end of file diff --git a/types/multirange.json b/types/multirange.json new file mode 100644 index 0000000..e38ad82 --- /dev/null +++ b/types/multirange.json @@ -0,0 +1,37 @@ +{ + "name": "multirange", + "erdAbbreviation": "", + "dtdAbbreviation": "{123}", + "parentType": "numeric", + "hiddenOnEntity": "view", + "defaultValues": { + "exclusiveMinimum": false, + "exclusiveMaximum": false, + "primaryKey": false, + "childRelationships": [], + "foreignField": [], + "enum": [], + "mode": "int4range" + }, + "descriptor": [ + { + "schema": { + "mode": "int4multirange" + }, + "capacity": 4 + }, + { + "schema": { + "mode": "int8multirange" + }, + "capacity": 8 + 
}, + { + "schema": { + "mode": "nummultirange" + }, + "capacity": 12, + "mode": "decimal" + } + ] +} \ No newline at end of file diff --git a/types/object.json b/types/object.json index 2b43330..7d1af1d 100644 --- a/types/object.json +++ b/types/object.json @@ -24,7 +24,8 @@ "xml", "json", "reference", - "multiple" + "multiple", + "multirange" ] } } diff --git a/types/range_type.json b/types/range_udt.json similarity index 87% rename from types/range_type.json rename to types/range_udt.json index 0843d1e..8a3d9e5 100644 --- a/types/range_type.json +++ b/types/range_udt.json @@ -1,5 +1,5 @@ { - "name": "range_type", + "name": "range_udt", "erdAbbreviation": "", "dtdAbbreviation": "{123}", "parentType": "string", From 36dc68bd0f51cee8529744211227b04cafdcbe9f Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 8 Oct 2021 16:43:22 +0300 Subject: [PATCH 29/69] RE: added support for RE of comments of columns --- .../helpers/postgresHelpers/columnHelper.js | 1 + reverse_engineering/helpers/queryConstants.js | 13 ++++++++----- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index 2ffbde9..2dbdf5b 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -24,6 +24,7 @@ const columnPropertiesMapper = { number_of_array_dimensions: 'numberOfArrayDimensions', udt_name: 'udt_name', character_maximum_length: 'length', + description: 'description', }; const mapColumnData = userDefinedTypes => column => { diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index e12dd8f..18b476f 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -25,10 +25,13 @@ const queryConstants = { WHERE table_name = $1 AND table_schema = $2 ORDER BY ordinal_position`, 
GET_TABLE_COLUMNS_ADDITIONAL_DATA: ` - SELECT attname AS name, - attndims AS number_of_array_dimensions - FROM pg_catalog.pg_attribute - WHERE attrelid = $1;`, + SELECT pg_attribute.attname AS name, + pg_attribute.attndims AS number_of_array_dimensions, + pg_description.description + FROM pg_catalog.pg_attribute AS pg_attribute + LEFT JOIN pg_catalog.pg_description AS pg_description ON (pg_description.objsubid=pg_attribute.attnum + AND pg_description.objoid = pg_attribute.attrelid) + WHERE pg_attribute.attrelid = $1;`, GET_DESCRIPTION_BY_OID: `SELECT obj_description($1)`, GET_ROWS_COUNT: fullTableName => `SELECT COUNT(*) AS quantity FROM ${fullTableName};`, GET_SAMPLED_DATA: fullTableName => `SELECT * FROM ${fullTableName} LIMIT $1;`, @@ -217,7 +220,7 @@ const queryConstants = { WHERE pg_attribute.attrelid = $1`, GET_DB_NAME: 'SELECT current_database();', GET_DB_ENCODING: 'SHOW SERVER_ENCODING;', - GET_DB_COLLATE_NAME: 'SELECT default_collate_name FROM information_schema.character_sets;' + GET_DB_COLLATE_NAME: 'SELECT default_collate_name FROM information_schema.character_sets;', }; const getQueryName = query => { From 2907c5887628161fc16f124e5db62903b72178d1 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 10:10:10 +0300 Subject: [PATCH 30/69] Configs: removed not needed synonyms, disabled enum value on entities --- .../field_level/fieldLevelConfig.json | 207 ------------------ types/object.json | 1 - 2 files changed, 208 deletions(-) diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 0706d5a..ea045ce 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -136,48 +136,6 @@ making sure that you maintain a proper JSON format. 
"valueType": "string", "cleanDependency": true }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "character" - ], - "dependency": { - "key": "mode", - "value": "char" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "character varying" - ], - "dependency": { - "key": "mode", - "value": "varchar" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "bit varying" - ], - "dependency": { - "key": "mode", - "value": "varbit" - } - }, { "propertyName": "Length", "propertyKeyword": "length", @@ -550,133 +508,6 @@ making sure that you maintain a proper JSON format. "data": "options", "valueType": "string" }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "int8" - ], - "dependency": { - "key": "mode", - "value": "bigint" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "serial8" - ], - "dependency": { - "key": "mode", - "value": "bigserial" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "float8" - ], - "dependency": { - "key": "mode", - "value": "double precision" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "int", - "int4" - ], - "dependency": { - "key": "mode", - "value": "integer" - } - }, - { - 
"propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "decimal" - ], - "dependency": { - "key": "mode", - "value": "numeric" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "float4" - ], - "dependency": { - "key": "mode", - "value": "real" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "int2" - ], - "dependency": { - "key": "mode", - "value": "smallint" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "serial2" - ], - "dependency": { - "key": "mode", - "value": "smallserial" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "serial4" - ], - "dependency": { - "key": "mode", - "value": "serial" - } - }, { "propertyName": "Precision", "propertyKeyword": "precision", @@ -1358,34 +1189,6 @@ making sure that you maintain a proper JSON format. 
"data": "options", "valueType": "string" }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "timetz" - ], - "dependency": { - "key": "mode", - "value": "time" - } - }, - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "timestamptz" - ], - "dependency": { - "key": "mode", - "value": "timestamp" - } - }, { "propertyName": "Precision", "propertyKeyword": "timePrecision", @@ -1755,16 +1558,6 @@ making sure that you maintain a proper JSON format. "code", "schemaId", "type", - { - "propertyName": "Synonym", - "propertyKeyword": "synonym", - "propertyTooltip": "Select from list of options", - "propertyType": "select", - "options": [ - "", - "bool" - ] - }, { "propertyName": "Comments", "propertyKeyword": "description", diff --git a/types/object.json b/types/object.json index 7d1af1d..22762be 100644 --- a/types/object.json +++ b/types/object.json @@ -16,7 +16,6 @@ "datetime", "boolean", "range", - "enum", "geometry", "inet", "uuid", From e30a18129ad3726f5cab119d3bd5d137f6d11e35 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 15:25:56 +0300 Subject: [PATCH 31/69] Fixed config issues --- .../entity_level/entityLevelConfig.json | 3 +-- types/binary.json | 13 +++------ types/datetime.json | 27 ++++++++++++++----- types/object.json | 5 +++- types/range.json | 23 +--------------- 5 files changed, 30 insertions(+), 41 deletions(-) diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index f36c4ff..1089b54 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -225,8 +225,7 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "A percentage between 10 and 100. 
Complete packing (100) is the default.", "minValue": 10, "maxValue": 100, - "step": 1, - "defaultValue": 100 + "step": 1 }, { "propertyName": "Parallel workers", diff --git a/types/binary.json b/types/binary.json index b700020..68a6b9b 100644 --- a/types/binary.json +++ b/types/binary.json @@ -7,22 +7,15 @@ "hiddenOnEntity": "view", "defaultValues": { "primaryKey": false, - "mode": "binary", + "mode": "bytea", "length": 10 }, "descriptor": [ { "schema": { - "mode": "binary" + "mode": "bytea" }, - "size": 255, - "mode": "binary" - }, - { - "schema": { - "mode": "varbinary" - }, - "size": 21844, + "size": 4, "mode": "binary" } ] diff --git a/types/datetime.json b/types/datetime.json index dd70d5e..eac1273 100644 --- a/types/datetime.json +++ b/types/datetime.json @@ -29,24 +29,39 @@ }, { "schema": { - "mode": "time", - "fractSecPrecision": 3 + "mode": "time" }, "format": "hh:mm:ss.nnn" }, { "schema": { - "mode": "time", - "fractSecPrecision": 6 + "mode": "time" }, "format": "hh:mm:ss.nnnnnn" }, { "schema": { - "mode": "time", - "fractSecPrecision": 9 + "mode": "time" }, "format": "hh:mm:ss.nnnnnnnnn" + }, + { + "schema": { + "mode": "timestamp" + }, + "format": "YYYY-MM-DD hh:mm:ss" + }, + { + "schema": { + "mode": "timestamp" + }, + "format": "YYYY-MM-DD hh:mm:ss.nnnZ" + }, + { + "schema": { + "mode": "timestamp" + }, + "format": "YYYY-MM-DD hh:mm:ss.nnnnnnZ" } ] } \ No newline at end of file diff --git a/types/object.json b/types/object.json index 22762be..1db659c 100644 --- a/types/object.json +++ b/types/object.json @@ -24,7 +24,10 @@ "json", "reference", "multiple", - "multirange" + "multirange", + "enum", + "composite", + "range_udt" ] } } diff --git a/types/range.json b/types/range.json index 3a9cd34..b11c8f6 100644 --- a/types/range.json +++ b/types/range.json @@ -24,26 +24,5 @@ "enum": [], "mode": "int4range", "sample": "" - }, - "descriptor": [ - { - "schema": { - "mode": "int4range" - }, - "capacity": 4 - }, - { - "schema": { - "mode": "int8range" - }, - 
"capacity": 8 - }, - { - "schema": { - "mode": "numrange" - }, - "capacity": 12, - "mode": "decimal" - } - ] + } } \ No newline at end of file From c41506a0bc586a5d2be488f19bcd0096920b4d70 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 15:29:11 +0300 Subject: [PATCH 32/69] FE: Added FE of Tables, UDT, Functions and procedures --- forward_engineering/config.json | 3 +- forward_engineering/configs/defaultTypes.js | 20 +- forward_engineering/configs/templates.js | 59 +- forward_engineering/configs/types.js | 372 ++++---- forward_engineering/ddlProvider.js | 863 ++++++++---------- .../helpers/columnDefinitionHelper.js | 186 ++-- .../helpers/constraintsHelper.js | 154 ++-- forward_engineering/helpers/functionHelper.js | 90 ++ forward_engineering/helpers/general.js | 347 +------ forward_engineering/helpers/keyHelper.js | 322 ++++--- .../helpers/procedureHelper.js | 18 + forward_engineering/helpers/tableHelper.js | 127 +++ forward_engineering/helpers/udtHelper.js | 65 ++ 13 files changed, 1257 insertions(+), 1369 deletions(-) create mode 100644 forward_engineering/helpers/functionHelper.js create mode 100644 forward_engineering/helpers/procedureHelper.js create mode 100644 forward_engineering/helpers/tableHelper.js create mode 100644 forward_engineering/helpers/udtHelper.js diff --git a/forward_engineering/config.json b/forward_engineering/config.json index bde5e7a..760580f 100644 --- a/forward_engineering/config.json +++ b/forward_engineering/config.json @@ -1,12 +1,11 @@ { "type": "ddl", "ddlType": "plugin", - "namePrefix": "MariaDB", + "namePrefix": "PostgreSQL", "level": { "container": true, "entity": true, "view": true }, - "resolvedDefinitions": true, "applyScriptToInstance": true } \ No newline at end of file diff --git a/forward_engineering/configs/defaultTypes.js b/forward_engineering/configs/defaultTypes.js index 0de5a91..f19af29 100644 --- a/forward_engineering/configs/defaultTypes.js +++ b/forward_engineering/configs/defaultTypes.js @@ 
-1,12 +1,12 @@ module.exports = { - number: 'NUMBER', - string: 'TEXT', - date: 'DATE', - timestamp: 'TIMESTAMP', - binary: 'BINARY', - boolean: 'BIT', - document: 'LONGTEXT', - array: 'LONGTEXT', - objectId: 'VARCHAR(24)', - default: 'CHAR', + number: 'numeric', + string: 'text', + date: 'date', + timestamp: 'timestamp', + binary: 'bytea', + boolean: 'boolean', + document: 'jsonb', + array: 'jsonb', + objectId: 'uuid', + default: 'char', }; diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index eded852..91ba782 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -1,42 +1,45 @@ module.exports = { - createDatabase: 'CREATE${orReplace} DATABASE${ifNotExist} `${name}`${dbOptions};\n\nUSE `${name}`;\n', + createSchema: 'CREATE SCHEMA${ifNotExist} ${name};\nSET search_path TO ${name};\n${comment}\n', - createTable: - 'CREATE ${orReplace}${temporary}TABLE ${ifNotExist}${name} (\n' + - '\t${column_definitions}${keyConstraints}${checkConstraints}${foreignKeyConstraints}\n' + - ')${options}${partitions}${selectStatement};\n', + comment: "COMMENT ON ${object} ${objectName} IS '${comment}';\n", - createLikeTable: 'CREATE ${orReplace}${temporary}TABLE ${ifNotExist}${name} LIKE ${likeTableName};\n', + createTable: + 'CREATE${temporary} TABLE${ifNotExist} ${name} (\n' + + '${columnDefinitions}${keyConstraints}${checkConstraints}${foreignKeyConstraints}\n' + + ')${options};\n${comment}\n', - columnDefinition: - '`${name}` ${national}${type}${signed}${primary_key}${unique_key}${default}${autoIncrement}${zeroFill}${not_null}${invisible}${compressed}${charset}${collate}${comment}', + columnDefinition: '${name} ${type}${collation}${primaryKey}${uniqueKey}${defaultValue}${notNull}', - checkConstraint: 'CONSTRAINT ${name}CHECK (${expression})', + checkConstraint: '${name} CHECK (${expression})${noInherit}', - createForeignKeyConstraint: - 'CONSTRAINT `${name}` FOREIGN KEY (${foreignKey}) 
REFERENCES ${primaryTable}(${primaryKey})', + createForeignKeyConstraint: '${name} FOREIGN KEY (${foreignKey}) REFERENCES ${primaryTable} (${primaryKey})', - createKeyConstraint: '${constraintName}${keyType}${columns}${using}${blockSize}${comment}${ignore}', + createKeyConstraint: '${constraintName}${keyType}${columns}${includeNonKey}${storageParameters}${tablespace}', - createForeignKey: - 'ALTER TABLE ${foreignTable} ADD CONSTRAINT `${name}` FOREIGN KEY (${foreignKey}) REFERENCES ${primaryTable}(${primaryKey});', + createForeignKey: + 'ALTER TABLE IF EXISTS ${foreignTable} ADD CONSTRAINT ${name} FOREIGN KEY (${foreignKey}) REFERENCES ${primaryTable}(${primaryKey});', - index: - 'CREATE ${indexType}INDEX ${ifNotExist}${name}${indexCategory}\n' + - '\tON ${table} ( ${keys} )${indexOptions};\n', + index: + 'CREATE ${indexType}INDEX ${ifNotExist}${name}${indexCategory}\n' + + '\tON ${table} ( ${keys} )${indexOptions};\n', - createView: - 'CREATE ${orReplace}${algorithm}${sqlSecurity}VIEW ${ifNotExist}${name} AS ${selectStatement}${checkOption};\n', + createView: + 'CREATE ${orReplace}${algorithm}${sqlSecurity}VIEW ${ifNotExist}${name} AS ${selectStatement}${checkOption};\n', - viewSelectStatement: 'SELECT ${keys}\n\tFROM ${tableName}', + viewSelectStatement: 'SELECT ${keys}\n\tFROM ${tableName}', - createFunction: - 'CREATE ${orReplace}${aggregate}FUNCTION ${ifNotExist}${name}\n' + - '\t(${parameters})\n' + - '\tRETURNS ${type}\n' + - '\t${characteristics}\n' + - '${body}${delimiter}\n', + createFunction: + 'CREATE${orReplace} FUNCTION ${name}\n' + + '\t(${parameters})\n' + + '\tRETURNS ${returnType}\n' + + '\tLANGUAGE ${language}\n' + + '${properties}' + + 'AS ${definition};\n', - createProcedure: - 'CREATE ${orReplace}PROCEDURE ${name} (${parameters})\n' + '\t${characteristics}\n' + '${body}${delimiter}\n', + createProcedure: + 'CREATE${orReplace} PROCEDURE ${name} (${parameters})\n' + '\tLANGUAGE ${language}\n' + 'AS ${body};\n', + + createCompositeType: 'CREATE 
TYPE ${name} AS (\n\t${columnDefinitions}\n);\n${comment}\n', + createEnumType: 'CREATE TYPE ${name} AS ENUM (${values});\n${comment}\n', + createRangeType: 'CREATE TYPE ${name} AS RANGE (\n\tSUBTYPE=${subtype}${options}\n);\n${comment}\n', }; diff --git a/forward_engineering/configs/types.js b/forward_engineering/configs/types.js index c46e492..d90b499 100644 --- a/forward_engineering/configs/types.js +++ b/forward_engineering/configs/types.js @@ -1,182 +1,194 @@ module.exports = { - TINYINT: { - capacity: 1, - }, - SMALLINT: { - capacity: 2, - }, - MEDIUMINT: { - capacity: 3, - }, - INT: { - capacity: 4, - }, - INTEGER: { - capacity: 4, - }, - BIGINT: { - capacity: 8, - }, - INT1: { - capacity: 1, - }, - INT2: { - capacity: 2, - }, - INT3: { - capacity: 3, - }, - INT4: { - capacity: 4, - }, - INT8: { - capacity: 8, - }, - FLOAT: { - capacity: 4, - mode: 'floating', - }, - DOUBLE: { - capacity: 8, - mode: 'floating', - }, - 'DOUBLE PRECISION': { - capacity: 8, - mode: 'floating', - }, - REAL: { - capacity: 8, - mode: 'floating', - }, - DECIMAL: { - capacity: 16, - mode: 'decimal', - }, - DEC: { - capacity: 16, - mode: 'decimal', - }, - NUMERIC: { - capacity: 16, - mode: 'decimal', - }, - FIXED: { - capacity: 16, - mode: 'decimal', - }, - NUMBER: { - capacity: 16, - mode: 'decimal', - }, - CHAR: { - size: 1, - }, - VARCHAR: { - mode: 'varying', - }, - TINYTEXT: { - size: 255, - mode: 'text', - }, - TEXT: { - size: 65535, - mode: 'text', - }, - MEDIUMTEXT: { - size: 16777215, - mode: 'text', - }, - LONGTEXT: { - size: 4294967295, - mode: 'text', - }, - JSON: { - size: 4294967295, - mode: 'text', - }, - BINARY: { - mode: 'binary', - }, - 'CHAR BYTE': { - mode: 'binary', - }, - VARBINARY: { - size: 2147483649, - mode: 'binary', - }, - TINYBLOB: { - size: 255, - mode: 'binary', - }, - BLOB: { - size: 65535, - mode: 'binary', - }, - MEDIUMBLOB: { - size: 16777215, - mode: 'binary', - }, - LONGBLOB: { - size: 4294967295, - mode: 'binary', - }, - BIT: { - mode: 'boolean', 
- }, - DATE: { - format: 'YYYY-MM-DD', - }, - TIME: { - format: 'hh:mm:ss.nnnnnn', - }, - DATETIME: { - format: 'YYYY-MM-DD hh:mm:ss', - }, - TIMESTAMP: { - format: 'YYYY-MM-DD hh:mm:ss', - }, - YEAR: { - format: 'YYYY', - }, - INET6: { - mode: 'ip', - }, - ENUM: { - mode: 'enum', - }, - SET: { - mode: 'enum', - }, - GEOMETRY: { - format: 'euclidian', - mode: 'geospatial', - }, - POINT: { - format: 'euclidian', - mode: 'geospatial', - }, - LINESTRING: { - format: 'euclidian', - mode: 'geospatial', - }, - POLYGON: { - format: 'euclidian', - mode: 'geospatial', - }, - MULTIPOINT: { - format: 'euclidian', - mode: 'geospatial', - }, - MULTILINESTRING: { - format: 'euclidian', - mode: 'geospatial', - }, - MULTIPOLYGON: { - format: 'euclidian', - mode: 'geospatial', - }, - GEOMETRYCOLLECTION: { - format: 'euclidian', - mode: 'geospatial', - }, + char: { + size: 1, + }, + varchar: { + mode: 'varying', + }, + text: { + mode: 'text', + }, + bit: { + size: 1, + mode: 'bit', + }, + varbit: { + size: 1, + mode: 'varying', + }, + smallint: { + capacity: 2, + }, + integer: { + capacity: 4, + }, + bigint: { + capacity: 8, + }, + numeric: { + capacity: 12, + mode: 'decimal', + }, + real: { + capacity: 4, + mode: 'floating', + }, + 'double precision': { + capacity: 8, + mode: 'floating', + }, + smallserial: { + capacity: 2, + }, + serial: { + capacity: 4, + }, + bigserial: { + capacity: 8, + }, + money: { + capacity: 8, + mode: 'decimal', + }, + bytea: { + size: 4, + mode: 'binary', + }, + date: { + format: 'YYYY-MM-DD', + }, + time: { + format: 'hh:mm:ss.nnnnnn', + }, + timestamp: { + format: 'YYYY-MM-DD hh:mm:ss', + }, + interval: {}, + boolean: { + mode: 'boolean', + }, + int4range: { + capacity: 4, + mode: 'range', + }, + int8range: { + capacity: 8, + mode: 'range', + }, + numrange: { + capacity: 12, + mode: 'decimal', + mode: 'range', + }, + daterange: { + format: 'YYYY-MM-DD', + mode: 'range', + }, + tsrange: { + format: 'YYYY-MM-DD hh:mm:ss', + mode: 'range', + }, + 
tstzrange: { + format: 'YYYY-MM-DD hh:mm:ss', + mode: 'range', + }, + + int4multirange: { + capacity: 4, + mode: 'multirange', + }, + int8multirange: { + capacity: 12, + mode: 'decimal', + mode: 'multirange', + }, + nummultirange: { + capacity: 12, + mode: 'decimal', + mode: 'multirange', + }, + datemultirange: { + format: 'YYYY-MM-DD', + mode: 'multirange', + }, + tsmultirange: { + format: 'YYYY-MM-DD hh:mm:ss', + mode: 'multirange', + }, + tstzmultirange: { + format: 'YYYY-MM-DD hh:mm:ss', + mode: 'multirange', + }, + point: { + format: 'euclidian', + mode: 'geospatial', + }, + line: { + format: 'euclidian', + mode: 'geospatial', + }, + lseg: { + format: 'euclidian', + mode: 'geospatial', + }, + box: { + format: 'euclidian', + mode: 'geospatial', + }, + path: { + format: 'euclidian', + mode: 'geospatial', + }, + polygon: { + format: 'euclidian', + mode: 'geospatial', + }, + circle: { + format: 'euclidian', + mode: 'geospatial', + }, + inet: { + mode: 'ip', + }, + cidr: { + mode: 'ip', + }, + macaddr: {}, + macaddr8: {}, + uuid: { + mode: 'uuid', + }, + oid: { + mode: 'uuid', + }, + regclass: {}, + regcollation: {}, + regconfig: {}, + regdictionary: {}, + regnamespace: {}, + regoper: {}, + regoperator: {}, + regproc: {}, + regprocedure: {}, + regrole: {}, + regtype: {}, + xml: { + mode: 'xml', + }, + json: { + format: 'semi-structured', + }, + jsonb: { + format: 'semi-structured', + }, + composite: { + format: 'semi-structured', + mode: 'object', + }, + enum: { + mode: 'enum', + }, + range_udt: { + mode: 'range', + }, }; diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 5ff1d2d..28deded 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -3,498 +3,373 @@ const types = require('./configs/types'); const templates = require('./configs/templates'); module.exports = (baseProvider, options, app) => { - const { - tab, - commentIfDeactivated, - checkAllKeysDeactivated, - 
divideIntoActivatedAndDeactivated, - hasType, - wrap, - clean, - } = app.utils.general; - const assignTemplates = app.utils.assignTemplates; - const _ = app.require('lodash'); - const { decorateDefault, decorateType, canBeNational, getSign } = require('./helpers/columnDefinitionHelper')(_, wrap); - const { getTableName, getTableOptions, getPartitions, getViewData, getCharacteristics } = require('./helpers/general')(_, wrap); - const { - generateConstraintsString, - foreignKeysToString, - foreignActiveKeysToString, - createKeyConstraint, - } = require('./helpers/constraintsHelper')({ - _, - commentIfDeactivated, - checkAllKeysDeactivated, - divideIntoActivatedAndDeactivated, - assignTemplates, - }); - const keyHelper = require('./helpers/keyHelper')(_, clean); - - return { - createDatabase({ databaseName, orReplace, ifNotExist, collation, characterSet, comments, udfs, procedures }) { - let dbOptions = ''; - dbOptions += characterSet ? tab(`\nCHARACTER SET = '${characterSet}'`) : ''; - dbOptions += collation ? tab(`\nCOLLATE = '${collation}'`) : ''; - dbOptions += comments ? tab(`\nCOMMENT = '${comments}'`) : ''; - - const databaseStatement = assignTemplates(templates.createDatabase, { - name: databaseName, - orReplace: orReplace && !ifNotExist ? ' OR REPLACE' : '', - ifNotExist: ifNotExist ? ' IF NOT EXISTS' : '', - dbOptions: dbOptions, - }); - const udfStatements = udfs.map(udf => { - const characteristics = getCharacteristics(udf.characteristics); - let startDelimiter = udf.delimiter ? `DELIMITER ${udf.delimiter}\n` : ''; - let endDelimiter = udf.delimiter ? `DELIMITER ;\n` : ''; - - return ( - startDelimiter + - assignTemplates(templates.createFunction, { - name: getTableName(udf.name, databaseName), - orReplace: udf.orReplace ? 'OR REPLACE ' : '', - ifNotExist: udf.ifNotExist ? 'IF NOT EXISTS ' : '', - aggregate: udf.aggregate ? 
'AGGREGATE ' : '', - characteristics: characteristics.join('\n\t'), - type: udf.type, - parameters: udf.parameters, - body: udf.body, - delimiter: udf.delimiter || ';', - }) + - endDelimiter - ); - }); - const procStatements = procedures.map(procedure => { - const characteristics = getCharacteristics(procedure.characteristics); - let startDelimiter = procedure.delimiter ? `DELIMITER ${procedure.delimiter}\n` : ''; - let endDelimiter = procedure.delimiter ? `DELIMITER ;\n` : ''; - - return ( - startDelimiter + - assignTemplates(templates.createProcedure, { - name: getTableName(procedure.name, databaseName), - orReplace: procedure.orReplace ? 'OR REPLACE ' : '', - parameters: procedure.parameters, - characteristics: characteristics.join('\n\t'), - body: procedure.body, - delimiter: procedure.delimiter || ';', - }) + - endDelimiter - ); - }); - - return [databaseStatement, ...udfStatements, ...procStatements].join('\n'); - }, - - createTable( - { - name, - columns, - dbData, - temporary, - orReplace, - ifNotExist, - likeTableName, - selectStatement, - options, - partitioning, - checkConstraints, - foreignKeyConstraints, - keyConstraints, - }, - isActivated, - ) { - const tableName = getTableName(name, dbData.databaseName); - const orReplaceTable = orReplace ? 'OR REPLACE ' : ''; - const temporaryTable = temporary ? 'TEMPORARY ' : ''; - const ifNotExistTable = ifNotExist ? 
'IF NOT EXISTS ' : ''; - - if (likeTableName) { - return assignTemplates(templates.createLikeTable, { - name: tableName, - likeTableName: getTableName(likeTableName, dbData.databaseName), - orReplace: orReplaceTable, - temporary: temporaryTable, - ifNotExist: ifNotExistTable, - }); - } - - const dividedKeysConstraints = divideIntoActivatedAndDeactivated( - keyConstraints.map(createKeyConstraint(templates, isActivated)), - key => key.statement, - ); - const keyConstraintsString = generateConstraintsString(dividedKeysConstraints, isActivated); - - const dividedForeignKeys = divideIntoActivatedAndDeactivated(foreignKeyConstraints, key => key.statement); - const foreignKeyConstraintsString = generateConstraintsString(dividedForeignKeys, isActivated); - - const tableStatement = assignTemplates(templates.createTable, { - name: tableName, - column_definitions: columns.join(',\n\t'), - selectStatement: selectStatement ? ` ${selectStatement}` : '', - orReplace: orReplaceTable, - temporary: temporaryTable, - ifNotExist: ifNotExistTable, - options: getTableOptions(options), - partitions: getPartitions(partitioning), - checkConstraints: checkConstraints.length ? ',\n\t' + checkConstraints.join(',\n\t') : '', - foreignKeyConstraints: foreignKeyConstraintsString, - keyConstraints: keyConstraintsString, - }); - - return tableStatement; - }, - - convertColumnDefinition(columnDefinition) { - const type = _.toUpper(columnDefinition.type); - const notNull = columnDefinition.nullable ? '' : ' NOT NULL'; - const primaryKey = columnDefinition.primaryKey ? ' PRIMARY KEY' : ''; - const unique = columnDefinition.unique ? ' UNIQUE' : ''; - const zeroFill = columnDefinition.zerofill ? ' ZEROFILL' : ''; - const autoIncrement = columnDefinition.autoIncrement ? ' AUTO_INCREMENT' : ''; - const invisible = columnDefinition.invisible ? ' INVISIBLE' : ''; - const national = columnDefinition.national && canBeNational(type) ? 'NATIONAL ' : ''; - const comment = columnDefinition.comment ? 
` COMMENT='${columnDefinition.comment}'` : ''; - const charset = type !== 'JSON' && columnDefinition.charset ? ` CHARSET ${columnDefinition.charset}` : ''; - const collate = type !== 'JSON' && columnDefinition.charset && columnDefinition.collation ? ` COLLATE ${columnDefinition.collation}` : ''; - const defaultValue = !_.isUndefined(columnDefinition.default) - ? ' DEFAULT ' + decorateDefault(type, columnDefinition.default) - : ''; - const compressed = columnDefinition.compressionMethod - ? ` COMPRESSED=${columnDefinition.compressionMethod}` - : ''; - const signed = getSign(type, columnDefinition.signed); - - return commentIfDeactivated( - assignTemplates(templates.columnDefinition, { - name: columnDefinition.name, - type: decorateType(type, columnDefinition), - not_null: notNull, - primary_key: primaryKey, - unique_key: unique, - default: defaultValue, - autoIncrement, - compressed, - signed, - zeroFill, - invisible, - comment, - national, - charset, - collate, - }), - { - isActivated: columnDefinition.isActivated, - }, - ); - }, - - createIndex(tableName, index, dbData, isParentActivated = true) { - if (_.isEmpty(index.indxKey) || !index.indxName) { - return ''; - } - - const allDeactivated = checkAllKeysDeactivated(index.indxKey || []); - const wholeStatementCommented = index.isActivated === false || !isParentActivated || allDeactivated; - const indexType = index.indexType ? `${_.toUpper(index.indexType)} ` : ''; - const ifNotExist = index.ifNotExist ? 'IF NOT EXISTS ' : ''; - const name = wrap(index.indxName || '', '`', '`'); - const table = getTableName(tableName, dbData.databaseName); - const indexCategory = index.indexCategory ? ` USING ${index.indexCategory}` : ''; - let indexOptions = []; - - const dividedKeys = divideIntoActivatedAndDeactivated( - index.indxKey || [], - key => `\`${key.name}\`${key.type === 'DESC' ? ' DESC' : ''}`, - ); - const commentedKeys = dividedKeys.deactivatedItems.length - ? 
commentIfDeactivated(dividedKeys.deactivatedItems.join(', '), { - isActivated: wholeStatementCommented, - isPartOfLine: true, - }) - : ''; - - if (_.toLower(index.waitNoWait) === 'wait' && index.waitValue) { - indexOptions.push(`WAIT ${index.waitValue}`); - } - - if (_.toLower(index.waitNoWait) === 'nowait') { - indexOptions.push(`NOWAIT`); - } - - if (index.indexComment) { - indexOptions.push(`COMMENT '${index.indexComment}'`); - } - - if (index.indexLock) { - indexOptions.push(`LOCK ${index.indexLock}`); - } else if (index.indexAlgorithm) { - indexOptions.push(`ALGORITHM ${index.indexAlgorithm}`); - } - - const indexStatement = assignTemplates(templates.index, { - keys: - dividedKeys.activatedItems.join(', ') + - (wholeStatementCommented && commentedKeys && dividedKeys.activatedItems.length - ? ', ' + commentedKeys - : commentedKeys), - indexOptions: indexOptions.length ? '\n\t' + indexOptions.join('\n\t') : '', - name, - table, - indexType, - ifNotExist, - indexCategory, - }); - - if (wholeStatementCommented) { - return commentIfDeactivated(indexStatement, { isActivated: false }); - } else { - return indexStatement; - } - }, - - createCheckConstraint(checkConstraint) { - return assignTemplates(templates.checkConstraint, { - name: checkConstraint.name ? 
`${wrap(checkConstraint.name, '`', '`')} ` : '', - expression: _.trim(checkConstraint.expression).replace(/^\(([\s\S]*)\)$/, '$1'), - }); - }, - - createForeignKeyConstraint( - { name, foreignKey, primaryTable, primaryKey, primaryTableActivated, foreignTableActivated }, - dbData, - ) { - const isAllPrimaryKeysDeactivated = checkAllKeysDeactivated(primaryKey); - const isAllForeignKeysDeactivated = checkAllKeysDeactivated(foreignKey); - const isActivated = - !isAllPrimaryKeysDeactivated && - !isAllForeignKeysDeactivated && - primaryTableActivated && - foreignTableActivated; - - return { - statement: assignTemplates(templates.createForeignKeyConstraint, { - primaryTable: getTableName(primaryTable, dbData.databaseName), - name, - foreignKey: isActivated ? foreignKeysToString(foreignKey) : foreignActiveKeysToString(foreignKey), - primaryKey: isActivated ? foreignKeysToString(primaryKey) : foreignActiveKeysToString(primaryKey), - }), - isActivated, - }; - }, - - createForeignKey( - { name, foreignTable, foreignKey, primaryTable, primaryKey, primaryTableActivated, foreignTableActivated }, - dbData, - ) { - const isAllPrimaryKeysDeactivated = checkAllKeysDeactivated(primaryKey); - const isAllForeignKeysDeactivated = checkAllKeysDeactivated(foreignKey); - - return { - statement: assignTemplates(templates.createForeignKey, { - primaryTable: getTableName(primaryTable, dbData.databaseName), - foreignTable: getTableName(foreignTable, dbData.databaseName), - name, - foreignKey: foreignKeysToString(foreignKey), - primaryKey: foreignKeysToString(primaryKey), - }), - isActivated: - !isAllPrimaryKeysDeactivated && - !isAllForeignKeysDeactivated && - primaryTableActivated && - foreignTableActivated, - }; - }, - - createView(viewData, dbData, isActivated) { - const allDeactivated = checkAllKeysDeactivated(viewData.keys || []); - const deactivatedWholeStatement = allDeactivated || !isActivated; - const { columns, tables } = getViewData(viewData.keys, dbData); - let columnsAsString = 
columns.map(column => column.statement).join(',\n\t\t'); - - if (!deactivatedWholeStatement) { - const dividedColumns = divideIntoActivatedAndDeactivated(columns, column => column.statement); - const deactivatedColumnsString = dividedColumns.deactivatedItems.length - ? commentIfDeactivated(dividedColumns.deactivatedItems.join(',\n\t\t'), { - isActivated: false, - isPartOfLine: true, - }) - : ''; - columnsAsString = dividedColumns.activatedItems.join(',\n\t\t') + deactivatedColumnsString; - } - - const selectStatement = _.trim(viewData.selectStatement) - ? _.trim(tab(viewData.selectStatement)) - : assignTemplates(templates.viewSelectStatement, { - tableName: tables.join(', '), - keys: columnsAsString, - }); - - const algorithm = viewData.algorithm && viewData.algorithm !== 'UNDEFINED' ? `ALGORITHM ${viewData.algorithm} ` : ''; - - return commentIfDeactivated( - assignTemplates(templates.createView, { - name: getTableName(viewData.name, dbData.databaseName), - orReplace: viewData.orReplace ? 'OR REPLACE ' : '', - ifNotExist: viewData.ifNotExist ? 'IF NOT EXISTS ' : '', - sqlSecurity: viewData.sqlSecurity ? `SQL SECURITY ${viewData.sqlSecurity} ` : '', - checkOption: viewData.checkOption ? 
`\nWITH ${viewData.checkOption} CHECK OPTION` : '', - selectStatement, - algorithm, - }), - { isActivated: !deactivatedWholeStatement }, - ); - }, - - createViewIndex(viewName, index, dbData, isParentActivated) { - return ''; - }, - - createUdt(udt, dbData) { - return ''; - }, - - getDefaultType(type) { - return defaultTypes[type]; - }, - - getTypesDescriptors() { - return types; - }, - - hasType(type) { - return hasType(types, type); - }, - - hydrateColumn({ columnDefinition, jsonSchema, dbData }) { - return { - name: columnDefinition.name, - type: columnDefinition.type, - primaryKey: keyHelper.isInlinePrimaryKey(jsonSchema), - unique: keyHelper.isInlineUnique(jsonSchema), - nullable: columnDefinition.nullable, - default: columnDefinition.default, - comment: columnDefinition.description, - isActivated: columnDefinition.isActivated, - length: columnDefinition.enum, - scale: columnDefinition.scale, - precision: columnDefinition.precision, - length: columnDefinition.length, - national: jsonSchema.national, - autoIncrement: jsonSchema.autoincrement, - zerofill: jsonSchema.zerofill, - invisible: jsonSchema.invisible, - compressionMethod: jsonSchema.compressed ? 
jsonSchema.compression_method : '', - enum: jsonSchema.enum, - synonym: jsonSchema.synonym, - signed: jsonSchema.zerofill || jsonSchema.signed, - microSecPrecision: jsonSchema.microSecPrecision, - charset: jsonSchema.characterSet, - collation: jsonSchema.collation, - }; - }, - - hydrateIndex(indexData, tableData) { - return indexData; - }, - - hydrateViewIndex(indexData) { - return {}; - }, - - hydrateCheckConstraint(checkConstraint) { - return { - name: checkConstraint.chkConstrName, - expression: checkConstraint.constrExpression, - }; - }, - - hydrateDatabase(containerData, data) { - return { - databaseName: containerData.name, - orReplace: containerData.orReplace, - ifNotExist: containerData.ifNotExist, - characterSet: containerData.characterSet, - collation: containerData.collation, - comments: containerData.description, - udfs: (data?.udfs || []).map(udf => ({ - name: udf.name, - delimiter: udf.functionDelimiter, - orReplace: udf.functionOrReplace, - aggregate: udf.functionAggregate, - ifNotExist: udf.functionIfNotExist, - parameters: udf.functionArguments, - type: udf.functionReturnType, - characteristics: { - sqlSecurity: udf.functionSqlSecurity, - language: udf.functionLanguage, - contains: udf.functionContains, - deterministic: udf.functionDeterministic, - comment: udf.functionDescription, - }, - body: udf.functionBody, - })), - procedures: (data?.procedures || []).map(procedure => ({ - orReplace: procedure.orReplace, - delimiter: procedure.delimiter, - name: procedure.name, - parameters: procedure.inputArgs, - body: procedure.body, - characteristics: { - comment: procedure.comments, - contains: procedure.contains, - language: procedure.language, - deterministic: procedure.deterministic, - sqlSecurity: procedure.securityMode, - }, - })), - }; - }, - - hydrateTable({ tableData, entityData, jsonSchema }) { - const detailsTab = entityData[0]; - const likeTable = _.get(tableData, `relatedSchemas[${detailsTab.like}]`, ''); - - return { - ...tableData, - 
keyConstraints: keyHelper.getTableKeyConstraints({ jsonSchema }), - temporary: detailsTab.temporary, - orReplace: detailsTab.orReplace, - ifNotExist: !detailsTab.orReplace && detailsTab.ifNotExist, - likeTableName: likeTable?.code || likeTable?.collectionName, - selectStatement: _.trim(detailsTab.selectStatement), - options: detailsTab.tableOptions, - partitioning: detailsTab.partitioning, - }; - }, - - hydrateViewColumn(data) { - return { - name: data.name, - tableName: data.entityName, - alias: data.alias, - isActivated: data.isActivated, - }; - }, - - hydrateView({ viewData, entityData, relatedSchemas, relatedContainers }) { - const detailsTab = entityData[0]; - - return { - name: viewData.name, - tableName: viewData.tableName, - keys: viewData.keys, - orReplace: detailsTab.orReplace, - ifNotExist: detailsTab.ifNotExist, - selectStatement: detailsTab.selectStatement, - sqlSecurity: detailsTab.SQL_SECURITY, - algorithm: detailsTab.algorithm, - checkOption: detailsTab.withCheckOption ? 
detailsTab.checkTestingScope : '', - }; - }, - - commentIfDeactivated(statement, data, isPartOfLine) { - return statement; - }, - }; + const { commentIfDeactivated, checkAllKeysDeactivated, divideIntoActivatedAndDeactivated, hasType, wrap, clean } = + app.utils.general; + const assignTemplates = app.utils.assignTemplates; + const _ = app.require('lodash'); + const { decorateType, decorateDefault } = require('./helpers/columnDefinitionHelper')(_, wrap); + const { getFunctionArguments, wrapInQuotes, getNamePrefixedWithSchemaName, getColumnsList } = + require('./helpers/general')({ + _, + divideIntoActivatedAndDeactivated, + commentIfDeactivated, + }); + const { generateConstraintsString, foreignKeysToString, foreignActiveKeysToString, createKeyConstraint } = + require('./helpers/constraintsHelper')({ + _, + commentIfDeactivated, + checkAllKeysDeactivated, + assignTemplates, + getColumnsList, + wrapInQuotes, + }); + const keyHelper = require('./helpers/keyHelper')(_, clean); + + const { getFunctionsScript } = require('./helpers/functionHelper')({ + _, + templates, + assignTemplates, + getFunctionArguments, + getNamePrefixedWithSchemaName, + }); + + const { getProceduresScript } = require('./helpers/procedureHelper')({ + _, + templates, + assignTemplates, + getFunctionArguments, + getNamePrefixedWithSchemaName, + }); + + const { getTableTemporaryValue, getTableOptions } = require('./helpers/tableHelper')({ + _, + checkAllKeysDeactivated, + getColumnsList, + }); + + const { getUserDefinedType } = require('./helpers/udtHelper')({ + _, + commentIfDeactivated, + assignTemplates, + templates, + getNamePrefixedWithSchemaName, + }); + + return { + createDatabase({ databaseName, ifNotExist, comments, udfs, procedures }) { + const comment = assignTemplates(templates.comment, { + object: 'SCHEMA', + objectName: wrapInQuotes(databaseName), + comment: comments, + }); + + const schemaStatement = assignTemplates(templates.createSchema, { + name: wrapInQuotes(databaseName), + 
ifNotExist: ifNotExist ? ' IF NOT EXISTS' : '', + comment: comments ? comment : '', + }); + + const createFunctionStatement = getFunctionsScript(databaseName, udfs); + const createProceduresStatement = getProceduresScript(databaseName, procedures); + + return _.trim([schemaStatement, createFunctionStatement, createProceduresStatement].join('\n\n')); + }, + + createTable( + { + name, + columns, + checkConstraints, + foreignKeyConstraints, + dbData, + columnDefinitions, + relatedSchemas, + keyConstraints, + inherits, + description, + ifNotExist, + usingMethod, + on_commit, + partitioning, + storage_parameter, + table_tablespace_name, + temporary, + unlogged, + selectStatement, + }, + isActivated + ) { + const comment = assignTemplates(templates.comment, { + object: 'TABLE', + objectName: getNamePrefixedWithSchemaName(name, dbData.databaseName), + comment: description, + }); + + const dividedKeysConstraints = divideIntoActivatedAndDeactivated( + keyConstraints.map(createKeyConstraint(templates, isActivated)), + key => key.statement + ); + const keyConstraintsString = generateConstraintsString(dividedKeysConstraints, isActivated); + + const dividedForeignKeys = divideIntoActivatedAndDeactivated(foreignKeyConstraints, key => key.statement); + const foreignKeyConstraintsString = generateConstraintsString(dividedForeignKeys, isActivated); + + const tableStatement = assignTemplates(templates.createTable, { + temporary: getTableTemporaryValue(temporary, unlogged), + ifNotExist, + name: getNamePrefixedWithSchemaName(name, dbData.databaseName), + columnDefinitions: '\t' + _.join(columns, ',\n\t'), + keyConstraints: keyConstraintsString, + checkConstraints: !_.isEmpty(checkConstraints) ? ',\n\t' + _.join(checkConstraints, ',\n\t') : '', + foreignKeyConstraints: foreignKeyConstraintsString, + options: getTableOptions({ + inherits, + partitioning, + usingMethod, + on_commit, + storage_parameter, + table_tablespace_name, + selectStatement, + }), + comment: description ? 
comment : '', + }); + + return tableStatement; + }, + + convertColumnDefinition(columnDefinition) { + const notNull = columnDefinition.nullable ? '' : ' NOT NULL'; + const primaryKey = columnDefinition.primaryKey ? ' PRIMARY KEY' : ''; + const uniqueKey = columnDefinition.unique ? ' UNIQUE' : ''; + const collation = columnDefinition.collationRule ? ` COLLATE "${columnDefinition.collationRule}"` : ''; + const defaultValue = !_.isUndefined(columnDefinition.default) + ? ' DEFAULT ' + decorateDefault(columnDefinition.type, columnDefinition.default) + : ''; + + return commentIfDeactivated( + assignTemplates(templates.columnDefinition, { + name: wrapInQuotes(columnDefinition.name), + type: decorateType(columnDefinition.type, columnDefinition), + notNull, + primaryKey, + uniqueKey, + collation, + defaultValue, + }), + { + isActivated: columnDefinition.isActivated, + } + ); + }, + + createIndex(tableName, index, dbData, isParentActivated = true) { + return ''; + }, + + createCheckConstraint(checkConstraint) { + return assignTemplates(templates.checkConstraint, { + name: checkConstraint.name ? `CONSTRAINT ${wrapInQuotes(checkConstraint.name)}` : '', + expression: _.trim(checkConstraint.expression).replace(/^\(([\s\S]*)\)$/, '$1'), + noInherit: checkConstraint.noInherit ? 
' NO INHERIT' : '', + }); + }, + + createForeignKeyConstraint( + { + name, + foreignKey, + primaryTable, + primaryKey, + primaryTableActivated, + foreignTableActivated, + foreignSchemaName, + primarySchemaName, + }, + dbData + ) { + const isAllPrimaryKeysDeactivated = checkAllKeysDeactivated(primaryKey); + const isAllForeignKeysDeactivated = checkAllKeysDeactivated(foreignKey); + const isActivated = + !isAllPrimaryKeysDeactivated && + !isAllForeignKeysDeactivated && + primaryTableActivated && + foreignTableActivated; + + const foreignKeyStatement = assignTemplates(templates.createForeignKeyConstraint, { + primaryTable: getNamePrefixedWithSchemaName(primaryTable, primarySchemaName || dbData.databaseName), + name: name ? `CONSTRAINT ${wrapInQuotes(name)}` : '', + foreignKey: isActivated ? foreignKeysToString(foreignKey) : foreignActiveKeysToString(foreignKey), + primaryKey: isActivated ? foreignKeysToString(primaryKey) : foreignActiveKeysToString(primaryKey), + }); + + return { + statement: _.trim(foreignKeyStatement), + isActivated, + }; + }, + + createForeignKey( + { + name, + foreignTable, + foreignKey, + primaryTable, + primaryKey, + primaryTableActivated, + foreignTableActivated, + foreignSchemaName, + primarySchemaName, + }, + dbData + ) { + const isAllPrimaryKeysDeactivated = checkAllKeysDeactivated(primaryKey); + const isAllForeignKeysDeactivated = checkAllKeysDeactivated(foreignKey); + const isActivated = + !isAllPrimaryKeysDeactivated && + !isAllForeignKeysDeactivated && + primaryTableActivated && + foreignTableActivated; + + const foreignKeyStatement = assignTemplates(templates.createForeignKey, { + primaryTable: getNamePrefixedWithSchemaName(primaryTable, primarySchemaName || dbData.databaseName), + foreignTable: getNamePrefixedWithSchemaName(foreignTable, foreignSchemaName || dbData.databaseName), + name: name ? wrapInQuotes(name) : '', + foreignKey: isActivated ? 
foreignKeysToString(foreignKey) : foreignActiveKeysToString(foreignKey), + primaryKey: isActivated ? foreignKeysToString(primaryKey) : foreignActiveKeysToString(primaryKey), + }); + + return { + statement: _.trim(foreignKeyStatement), + isActivated, + }; + }, + + createView(viewData, dbData, isActivated) { + return ''; + }, + + createViewIndex(viewName, index, dbData, isParentActivated) { + return ''; + }, + + createUdt(udt, dbData) { + const columns = _.map(udt.properties, this.convertColumnDefinition); + + return getUserDefinedType(udt, columns); + }, + + getDefaultType(type) { + return defaultTypes[type]; + }, + + getTypesDescriptors() { + return types; + }, + + hasType(type) { + return hasType(types, type); + }, + + hydrateColumn({ columnDefinition, jsonSchema, dbData }) { + const collationRule = _.includes(['char', 'varchar', 'text'], columnDefinition.type) + ? jsonSchema.collationRule + : ''; + const timeTypes = ['time', 'timestamp']; + const timePrecision = _.includes(timeTypes, columnDefinition.type) ? jsonSchema.timePrecision : ''; + const with_timezone = _.includes(timeTypes, columnDefinition.type) ? jsonSchema.with_timezone : ''; + const intervalOptions = columnDefinition.type === 'interval' ? 
jsonSchema.intervalOptions : ''; + + return { + name: columnDefinition.name, + type: columnDefinition.type, + primaryKey: keyHelper.isInlinePrimaryKey(jsonSchema), + unique: keyHelper.isInlineUnique(jsonSchema), + nullable: columnDefinition.nullable, + default: columnDefinition.default, + comment: jsonSchema.description, + isActivated: columnDefinition.isActivated, + scale: columnDefinition.scale, + precision: columnDefinition.precision, + length: columnDefinition.length, + enum: jsonSchema.enum, + array_type: jsonSchema.array_type, + unit: jsonSchema.unit, + rangeSubtype: jsonSchema.rangeSubtype, + operatorClass: jsonSchema.operatorClass, + collation: jsonSchema.collation, + canonicalFunction: jsonSchema.canonicalFunction, + subtypeDiffFunction: jsonSchema.subtypeDiffFunction, + multiRangeType: jsonSchema.multiRangeType, + databaseName: dbData.databaseName, + collationRule, + timePrecision, + with_timezone, + intervalOptions, + }; + }, + + hydrateIndex(indexData, tableData) { + return indexData; + }, + + hydrateViewIndex(indexData) { + return {}; + }, + + hydrateCheckConstraint(checkConstraint) { + return { + name: checkConstraint.chkConstrName, + expression: checkConstraint.constrExpression, + noInherit: checkConstraint.noInherit, + }; + }, + + hydrateDatabase(containerData, data) { + return { + databaseName: containerData.name, + ifNotExist: containerData.ifNotExist, + comments: containerData.description, + udfs: data?.udfs || [], + procedures: data?.procedures || [], + }; + }, + + hydrateTable({ tableData, entityData, jsonSchema }) { + const detailsTab = entityData[0]; + const inheritsTable = _.get(tableData, `relatedSchemas[${detailsTab.inherits}]`, ''); + const partitioning = _.first(detailsTab.partitioning) || {}; + const compositePartitionKey = keyHelper.getKeys(partitioning.compositePartitionKey, jsonSchema); + + return { + ...tableData, + keyConstraints: keyHelper.getTableKeyConstraints(jsonSchema), + inherits: inheritsTable?.code || 
inheritsTable?.collectionName, + selectStatement: _.trim(detailsTab.selectStatement), + partitioning: _.assign({}, partitioning, { compositePartitionKey }), + ..._.pick( + detailsTab, + 'temporary', + 'unlogged', + 'description', + 'ifNotExist', + 'usingMethod', + 'on_commit', + 'storage_parameter', + 'table_tablespace_name' + ), + }; + }, + + hydrateViewColumn(data) { + return ''; + }, + + hydrateView({ viewData, entityData, relatedSchemas, relatedContainers }) { + return ''; + }, + + commentIfDeactivated(statement, data, isPartOfLine) { + return statement; + }, + }; }; diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index 7f321e4..f9bc3cb 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -1,105 +1,85 @@ module.exports = (_, wrap) => { - const addLength = (type, length) => { - return `${type}(${length})`; - }; - - const addScalePrecision = (type, precision, scale) => { - if (_.isNumber(scale)) { - return `${type}(${precision},${scale})`; - } else { - return `${type}(${precision})`; - } - }; - - const addPrecision = (type, precision) => { - return `${type}(${precision})`; - }; - - const canHaveLength = type => ['CHAR', 'VARCHAR', 'BINARY', 'CHAR BYTE', 'VARBINARY', 'BLOB'].includes(type); - - const isNumeric = type => - [ - 'TINYINT', - 'SMALLINT', - 'MEDIUMINT', - 'INT', - 'INTEGER', - 'BIGINT', - 'INT1', - 'INT2', - 'INT3', - 'INT4', - 'INT8', - 'FLOAT', - 'DOUBLE', - 'REAL', - 'DECIMAL', - 'DEC', - 'NUMERIC', - 'FIXED', - 'NUMBER', - 'DOUBLE PRECISION', - 'BIT', - ].includes(type); - - const canHavePrecision = type => isNumeric(type); - - const canHaveMicrosecondPrecision = type => ['TIME', 'DATETIME', 'TIMESTAMP'].includes(type); - - const canHaveScale = type => - ['DECIMAL', 'FLOAT', 'DOUBLE', 'DEC', 'FIXED', 'NUMERIC', 'NUMBER', 'DOUBLE PRECISION', 'REAL'].includes(type); - - const decorateType = (type, 
columnDefinition) => { - if (canHaveLength(type) && _.isNumber(columnDefinition.length)) { - return addLength(type, columnDefinition.length); - } else if (canHavePrecision(type) && canHaveScale(type) && _.isNumber(columnDefinition.precision)) { - return addScalePrecision(type, columnDefinition.precision, columnDefinition.scale); - } else if (canHavePrecision(type) && _.isNumber(columnDefinition.precision)) { - return addPrecision(type, columnDefinition.precision); - } else if (canHaveMicrosecondPrecision(type) && _.isNumber(columnDefinition.microSecPrecision)) { - return addPrecision(type, columnDefinition.microSecPrecision); - } else if (['ENUM', 'SET'].includes(type) && !_.isEmpty(columnDefinition.enum)) { - return `${type}('${columnDefinition.enum.join("', '")}')`; - } - - return type; - }; - - const isString = type => ['CHAR', 'VARCHAR', 'TEXT', 'TINYTEXT', 'MEDIUMTEXT', 'LONGTEXT'].includes(_.toUpper(type)); - const isDateTime = type => ['TIME', 'DATE', 'DATETIME', 'TIMESTAMP'].includes(type); - - const escapeQuotes = str => _.trim(str).replace(/(\')+/g, "'$1"); - - const decorateDefault = (type, defaultValue) => { - const constantsValues = ['current_timestamp', 'null']; - if ((isString(type) || isDateTime(type)) && !constantsValues.includes(_.toLower(defaultValue))) { - return wrap(escapeQuotes(defaultValue)); - } else { - return defaultValue; - } - }; - - const canBeNational = type => { - return ['CHAR', 'VARCHAR'].includes(type); - }; - - const getSign = (type, signed) => { - if (!isNumeric(type)) { - return ''; - } - - if (signed === false) { - return ' UNSIGNED'; - } - - return ''; - }; - - return { - decorateType, - decorateDefault, - canBeNational, - isNumeric, - getSign, - }; + const addLength = (type, length) => { + return `${type}(${length})`; + }; + + const addScalePrecision = (type, precision, scale) => { + if (_.isNumber(scale)) { + return `${type}(${precision},${scale})`; + } else { + return `${type}(${precision})`; + } + }; + + const 
addPrecision = (type, precision) => { + if (_.isNumber(precision)) { + return `${type}(${precision})`; + } + + return type; + }; + + const addWithTimezone = (type, with_timezone) => { + if (with_timezone) { + return `${type} WITH TIME ZONE`; + } + + return type; + }; + + const isNumeric = type => + [ + 'smallint', + 'integer', + 'bigint', + 'numeric', + 'real', + 'double precision', + 'smallserial', + 'serial', + 'bigserial', + 'money', + ].includes(type); + + const canHaveLength = type => ['char', 'varchar', 'bit', 'varbit'].includes(type); + const canHavePrecision = type => isNumeric(type); + const canHaveTimePrecision = type => ['time', 'timestamp'].includes(type); + const canHaveScale = type => type === 'numeric'; + + const decorateType = (type, columnDefinition) => { + if (canHaveLength(type) && _.isNumber(columnDefinition.length)) { + return addLength(type, columnDefinition.length); + } else if (canHavePrecision(type) && canHaveScale(type) && _.isNumber(columnDefinition.precision)) { + return addScalePrecision(type, columnDefinition.precision, columnDefinition.scale); + } else if (canHavePrecision(type) && _.isNumber(columnDefinition.precision)) { + return addPrecision(type, columnDefinition.precision); + } else if ( + canHaveTimePrecision(type) && + (_.isNumber(columnDefinition.timePrecision) || columnDefinition.with_timezone) + ) { + return addWithTimezone(addPrecision(type, columnDefinition.timePrecision), columnDefinition.with_timezone); + } + + return type; + }; + + const isString = type => ['char', 'varchar', 'text', 'bit', 'varbit'].includes(type); + const isDateTime = type => ['date', 'time', 'timestamp', 'interval'].includes(type); + + const escapeQuotes = str => _.trim(str).replace(/(\')+/g, "'$1"); + + const decorateDefault = (type, defaultValue) => { + const constantsValues = ['current_timestamp', 'null']; + if ((isString(type) || isDateTime(type)) && !constantsValues.includes(_.toLower(defaultValue))) { + return wrap(escapeQuotes(defaultValue), 
'"', '"'); + } else { + return defaultValue; + } + }; + + return { + decorateType, + isNumeric, + decorateDefault, + }; }; diff --git a/forward_engineering/helpers/constraintsHelper.js b/forward_engineering/helpers/constraintsHelper.js index 3bf0785..0ceb134 100644 --- a/forward_engineering/helpers/constraintsHelper.js +++ b/forward_engineering/helpers/constraintsHelper.js @@ -1,82 +1,78 @@ - module.exports = ({ - _, - commentIfDeactivated, - checkAllKeysDeactivated, - divideIntoActivatedAndDeactivated, - assignTemplates, + _, + commentIfDeactivated, + checkAllKeysDeactivated, + assignTemplates, + wrapInQuotes, + getColumnsList, }) => { - const generateConstraintsString = (dividedConstraints, isParentActivated) => { - const deactivatedItemsAsString = commentIfDeactivated((dividedConstraints?.deactivatedItems || []).join(',\n\t'), { - isActivated: !isParentActivated, - isPartOfLine: true, - }); - const activatedConstraints = dividedConstraints?.activatedItems?.length - ? ',\n\t' + dividedConstraints.activatedItems.join(',\n\t') - : ''; - - const deactivatedConstraints = dividedConstraints?.deactivatedItems?.length - ? '\n\t' + deactivatedItemsAsString - : ''; - - return activatedConstraints + deactivatedConstraints; - }; - - const foreignKeysToString = keys => { - if (Array.isArray(keys)) { - const activatedKeys = keys.filter(key => _.get(key, 'isActivated', true)).map(key => `\`${_.trim(key.name)}\``); - const deactivatedKeys = keys - .filter(key => !_.get(key, 'isActivated', true)) - .map(key => `\`${_.trim(key.name)}\``); - const deactivatedKeysAsString = deactivatedKeys.length - ? 
commentIfDeactivated(deactivatedKeys, { isActivated: false, isPartOfLine: true }) - : ''; - - return activatedKeys.join(', ') + deactivatedKeysAsString; - } - return keys; - }; - - const foreignActiveKeysToString = keys => { - return keys.map(key => _.trim(key.name)).join(', '); - }; - - const createKeyConstraint = (templates, isParentActivated) => keyData => { - const columnMapToString = ({ name, order }) => `\`${name}\` ${order}`.trim(); - - const isAllColumnsDeactivated = checkAllKeysDeactivated(keyData.columns); - const dividedColumns = divideIntoActivatedAndDeactivated(keyData.columns, columnMapToString); - const deactivatedColumnsAsString = dividedColumns?.deactivatedItems?.length - ? commentIfDeactivated(dividedColumns.deactivatedItems.join(', '), { isActivated: false, isPartOfLine: true }) - : ''; - - const columns = - !isAllColumnsDeactivated && isParentActivated - ? ' (' + dividedColumns.activatedItems.join(', ') + deactivatedColumnsAsString + ')' - : ' (' + keyData.columns.map(columnMapToString).join(', ') + ')'; - const using = keyData.category ? ` USING ${keyData.category}` : ''; - const ignore = keyData.ignore ? ` IGNORED` : ''; - const comment = keyData.comment ? ` COMMENT '${keyData.comment}'` : ''; - const blockSize = keyData.blockSize ? ` KEY_BLOCK_SIZE=${keyData.blockSize}` : ''; - - return { - statement: assignTemplates(templates.createKeyConstraint, { - constraintName: keyData.name ? 
`CONSTRAINT \`${_.trim(keyData.name)}\` ` : '', - keyType: keyData.keyType, - blockSize, - columns, - comment, - ignore, - using, - }), - isActivated: !isAllColumnsDeactivated, - }; - }; - - return { - generateConstraintsString, - foreignKeysToString, - foreignActiveKeysToString, - createKeyConstraint, - }; + const generateConstraintsString = (dividedConstraints, isParentActivated) => { + const deactivatedItemsAsString = commentIfDeactivated( + (dividedConstraints?.deactivatedItems || []).join(',\n\t'), + { + isActivated: !isParentActivated, + isPartOfLine: true, + } + ); + const activatedConstraints = dividedConstraints?.activatedItems?.length + ? ',\n\t' + dividedConstraints.activatedItems.join(',\n\t') + : ''; + + const deactivatedConstraints = dividedConstraints?.deactivatedItems?.length + ? '\n\t' + deactivatedItemsAsString + : ''; + + return activatedConstraints + deactivatedConstraints; + }; + + const foreignKeysToString = keys => { + if (Array.isArray(keys)) { + const activatedKeys = keys + .filter(key => _.get(key, 'isActivated', true)) + .map(key => wrapInQuotes(_.trim(key.name))); + const deactivatedKeys = keys + .filter(key => !_.get(key, 'isActivated', true)) + .map(key => wrapInQuotes(_.trim(key.name))); + const deactivatedKeysAsString = deactivatedKeys.length + ? commentIfDeactivated(deactivatedKeys, { isActivated: false, isPartOfLine: true }) + : ''; + + return activatedKeys.join(', ') + deactivatedKeysAsString; + } + return keys; + }; + + const foreignActiveKeysToString = keys => { + return keys.map(key => _.trim(key.name)).join(', '); + }; + + const createKeyConstraint = (templates, isParentActivated) => keyData => { + const constraintName = wrapInQuotes(_.trim(keyData.name)); + const isAllColumnsDeactivated = checkAllKeysDeactivated(keyData.columns); + const columns = getColumnsList(keyData.columns, isAllColumnsDeactivated, isParentActivated); + const includeNonKey = keyData.includeNonKey + ? 
` INCLUDE (${getColumnsList(keyData.include, isAllColumnsDeactivated, isParentActivated)})` + : ''; + const storageParameters = keyData.storageParameters ? ` WITH (${keyData.storageParameters})` : ''; + const tablespace = keyData.tablespace ? ` USING INDEX TABLESPACE ${wrapInQuotes(keyData.tablespace)}` : ''; + + return { + statement: assignTemplates(templates.createKeyConstraint, { + constraintName: keyData.name ? `CONSTRAINT ${wrapInQuotes(constraintName)} ` : '', + keyType: keyData.keyType, + columns, + includeNonKey, + storageParameters, + tablespace, + }), + isActivated: !isAllColumnsDeactivated, + }; + }; + + return { + generateConstraintsString, + foreignKeysToString, + foreignActiveKeysToString, + createKeyConstraint, + }; }; diff --git a/forward_engineering/helpers/functionHelper.js b/forward_engineering/helpers/functionHelper.js new file mode 100644 index 0000000..3cbb2fb --- /dev/null +++ b/forward_engineering/helpers/functionHelper.js @@ -0,0 +1,90 @@ +module.exports = ({ _, templates, assignTemplates, getFunctionArguments, getNamePrefixedWithSchemaName }) => { + const getFunctionsScript = (schemaName, udfs) => { + return _.map(udfs, udf => { + const orReplace = udf.functionOrReplace ? ' OR REPLACE' : ''; + + return assignTemplates(templates.createFunction, { + name: getNamePrefixedWithSchemaName(udf.name, schemaName), + orReplace: orReplace, + parameters: getFunctionArguments(udf.functionArguments), + returnType: udf.functionReturnsSetOf ? 'SETOF' : udf.functionReturnType, + language: udf.functionLanguage, + properties: getProperties(udf), + definition: udf.functionDefinition, + }); + }).join('\n'); + }; + + const getProperties = udf => { + const wrap = value => (value ? 
`\t${value}\n` : ''); + + return [ + { key: 'functionWindow', getValue: getWindow }, + { key: 'functionVolatility', getValue: getVolatility }, + { key: 'functionLeakProof', getValue: getLeakProof }, + { key: 'functionNullArgs', getValue: getNullArgs }, + { key: 'functionSqlSecurity', getValue: getSqlSecurity }, + { key: 'functionParallel', getValue: getParallel }, + { key: 'functionExecutionCost', getValue: getExecutionCost }, + { key: 'functionExecutionRows', getValue: getExecutionRows }, + { key: 'functionSupportFunction', getValue: getSupportFunction }, + { key: 'functionConfigurationParameters', getValue: getConfigurationParameters }, + ] + .map(config => wrap(config.getValue(udf[config.key], udf))) + .filter(Boolean) + .join(''); + }; + + const getWindow = (value, udf) => { + if (udf.language !== 'c' || !value) { + return ''; + } + + return 'WINDOW'; + }; + const getVolatility = value => value; + const getLeakProof = value => { + if (value) { + return 'LEAKPROOF'; + } + + return 'NOT LEAKPROOF'; + }; + const getNullArgs = value => value; + const getSqlSecurity = value => { + if (value) { + return `SECURITY ${value}`; + } + }; + const getParallel = value => { + if (value) { + return `PARALLEL ${value}`; + } + }; + const getExecutionCost = value => { + if (value) { + return `COST ${value}`; + } + }; + const getExecutionRows = (value, udf) => { + if (!value || udf.functionReturnsSetOf) { + return ''; + } + + return `ROWS ${value}`; + }; + const getSupportFunction = value => { + if (value) { + return `SUPPORT ${value}`; + } + }; + const getConfigurationParameters = value => { + if (value) { + return `SET ${value}`; + } + }; + + return { + getFunctionsScript, + }; +}; diff --git a/forward_engineering/helpers/general.js b/forward_engineering/helpers/general.js index 5d0411c..d3ac934 100644 --- a/forward_engineering/helpers/general.js +++ b/forward_engineering/helpers/general.js @@ -1,309 +1,42 @@ -module.exports = (_, wrap) => { - const OPTION_KEYWORDS = { - 
'ENGINE': 'ENGINE', - 'AUTO_INCREMENT': 'AUTO_INCREMENT', - 'AVG_ROW_LENGTH': 'AVG_ROW_LENGTH', - 'CHECKSUM': 'CHECKSUM', - 'DATA_DIRECTORY': 'DATA DIRECTORY', - 'DELAY_KEY_WRITE': 'DELAY_KEY_WRITE', - 'INDEX_DIRECTORY': 'INDEX DIRECTORY', - 'ENCRYPTED': 'ENCRYPTED', - 'ENCRYPTION_KEY_ID': 'ENCRYPTION_KEY_ID', - 'IETF_QUOTES': 'IETF_QUOTES', - 'INSERT_METHOD': 'INSERT_METHOD', - 'UNION': 'UNION', - 'KEY_BLOCK_SIZE': 'KEY_BLOCK_SIZE', - 'MIN_ROWS': 'MIN_ROWS', - 'MAX_ROWS': 'MAX_ROWS', - 'PACK_KEYS': 'PACK_KEYS', - 'PAGE_CHECKSUM': 'PAGE_CHECKSUM', - 'PAGE_COMPRESSED': 'PAGE_COMPRESSED', - 'PAGE_COMPRESSION_LEVEL': 'PAGE_COMPRESSION_LEVEL', - 'ROW_FORMAT': 'ROW_FORMAT', - 'SEQUENCE': 'SEQUENCE', - 'STATS_AUTO_RECALC': 'STATS_AUTO_RECALC', - 'STATS_PERSISTENT': 'STATS_PERSISTENT', - 'TRANSACTIONAL': 'TRANSACTIONAL', - 'WITH_SYSTEM_VERSIONING': 'WITH SYSTEM VERSIONING', - }; - - const OPTIONS_BY_ENGINE = { - 'MyISAM': [ - 'AUTO_INCREMENT', - 'AVG_ROW_LENGTH', - 'CHECKSUM', - 'DATA_DIRECTORY', - 'DELAY_KEY_WRITE', - 'INDEX_DIRECTORY', - 'KEY_BLOCK_SIZE', - 'PACK_KEYS', - 'ROW_FORMAT', - 'WITH_SYSTEM_VERSIONING', - ], - 'InnoDB': [ - 'AUTO_INCREMENT', - 'DATA_DIRECTORY', - 'INDEX_DIRECTORY', - 'ENCRYPTED', - 'ENCRYPTION_KEY_ID', - 'KEY_BLOCK_SIZE', - 'PACK_KEYS', - 'PAGE_COMPRESSED', - 'PAGE_COMPRESSION_LEVEL', - 'ROW_FORMAT', - 'SEQUENCE', - 'STATS_AUTO_RECALC', - 'STATS_PERSISTENT', - 'WITH_SYSTEM_VERSIONING', - ], - 'CSV': ['IETF_QUOTES', 'KEY_BLOCK_SIZE', 'PACK_KEYS', 'WITH_SYSTEM_VERSIONING'], - 'MERGE': ['INSERT_METHOD', 'UNION', 'KEY_BLOCK_SIZE', 'PACK_KEYS', 'WITH_SYSTEM_VERSIONING'], - 'Aria': [ - 'AUTO_INCREMENT', - 'AVG_ROW_LENGTH', - 'CHECKSUM', - 'DATA_DIRECTORY', - 'DELAY_KEY_WRITE', - 'INDEX_DIRECTORY', - 'PAGE_CHECKSUM', - 'ROW_FORMAT', - 'KEY_BLOCK_SIZE', - 'PACK_KEYS', - 'TRANSACTIONAL', - 'WITH_SYSTEM_VERSIONING', - ], - 'Memory': ['AUTO_INCREMENT', 'KEY_BLOCK_SIZE', 'PACK_KEYS', 'WITH_SYSTEM_VERSIONING'], - 'Archive': ['AUTO_INCREMENT', 
'KEY_BLOCK_SIZE', 'PACK_KEYS', 'WITH_SYSTEM_VERSIONING'], - }; - - const getTableName = (tableName, schemaName) => { - if (schemaName) { - return `\`${schemaName}\`.\`${tableName}\``; - } else { - return `\`${tableName}\``; - } - }; - - const getOptionValue = (keyword, value) => { - if (['ROW_FORMAT', 'INSERT_METHOD'].includes(keyword)) { - if (value) { - return _.toUpper(value); - } else { - return; - } - } - - if (keyword === 'UNION') { - return value; - } - - if (['YES', 'NO', 'DEFAULT'].includes(_.toUpper(value))) { - return _.toUpper(value); - } - if (typeof value === 'number') { - return value; - } else if (!isNaN(+value) && value) { - return +value; - } else if (typeof value === 'string' && value) { - return wrap(value); - } else if (typeof value === 'boolean') { - return value ? 'YES' : 'NO'; - } - }; - - const getTableOptions = options => { - const tableOptions = []; - const engine = options.ENGINE; - - if (!options.defaultCharSet) { - if (options.characterSet) { - tableOptions.push(`CHARSET=${options.characterSet}`); - } - if (options.collation) { - tableOptions.push(`COLLATE=${options.collation}`); - } - } - - if (engine) { - tableOptions.push(`ENGINE = ${engine}`); - } - - const optionKeywords = OPTIONS_BY_ENGINE[engine] || ['KEY_BLOCK_SIZE', 'PACK_KEYS', 'WITH_SYSTEM_VERSIONING']; - - optionKeywords.forEach(keyword => { - if (keyword === 'WITH_SYSTEM_VERSIONING') { - if (options[keyword]) { - return tableOptions.push(OPTION_KEYWORDS[keyword]); - } else { - return; - } - } - - const value = getOptionValue(keyword, options[keyword]); - - if (value === undefined) { - return; - } - - const option = `${OPTION_KEYWORDS[keyword]} = ${value}`; - - tableOptions.push(option); - }); - - if (!tableOptions.length) { - return ''; - } - - return ' ' + tableOptions.join(',\n\t'); - }; - - const addLinear = linear => (linear ? 
'LINEAR ' : ''); - - const getPartitionBy = partitioning => { - if (partitioning.partitionType === 'SYSTEM_TIME') { - let interval = - !isNaN(partitioning.interval) && partitioning.interval ? ` INTERVAL ${partitioning.interval}` : ''; - - if (interval && partitioning.time_unit) { - interval += ` ${partitioning.time_unit}`; - } - - return `SYSTEM_TIME${interval}`; - } - - return `${addLinear(partitioning.LINEAR)}${partitioning.partitionType}(${_.trim( - partitioning.partitioning_expression, - )})`; - }; - - const getSubPartitionBy = partitioning => { - if (!partitioning.subpartitionType) { - return ''; - } - - return `SUBPARTITION BY ${addLinear(partitioning.SUBLINEAR)}${partitioning.subpartitionType}(${_.trim( - partitioning.subpartitioning_expression, - )})`; - }; - - const getPartitionDefinitions = partitioning => { - if (!Array.isArray(partitioning.partition_definitions)) { - return ''; - } - - const partitions = partitioning.partition_definitions - .filter(({ partitionDefinition }) => partitionDefinition) - .map(partitionDefinition => { - const subPartitionDefinitions = partitionDefinition.subpartitionDefinition; - - if (subPartitionDefinitions) { - return partitionDefinition.partitionDefinition + ' ' + wrap(subPartitionDefinitions, '(', ')'); - } else { - return partitionDefinition.partitionDefinition; - } - }) - .join(',\n\t\t'); - - if (!partitions) { - return ''; - } - - return wrap('\n\t\t' + partitions + '\n\t', '(', ')'); - }; - - const getPartitions = partitioning => { - if (!partitioning.partitionType) { - return ''; - } - - const partitionBy = `PARTITION BY ${getPartitionBy(partitioning)}`; - const partitions = partitioning.partitions ? `PARTITIONS ${partitioning.partitions}` : ''; - const subPartitionBy = getSubPartitionBy(partitioning); - const subPartitions = partitioning.subpartitions ? 
`SUBPARTITIONS ${partitioning.subpartitions}` : ''; - const partitionDefinitions = getPartitionDefinitions(partitioning); - - const result = [partitionBy, partitions, subPartitionBy, subPartitions, partitionDefinitions].filter(Boolean); - - if (!result.length) { - return ''; - } - - return '\n\t' + result.join('\n\t'); - }; - - const getKeyWithAlias = key => { - if (!key) { - return ''; - } - - if (key.alias) { - return `\`${key.name}\` as \`${key.alias}\``; - } else { - return `\`${key.name}\``; - } - }; - - const getViewData = (keys, dbData) => { - if (!Array.isArray(keys)) { - return { tables: [], columns: [] }; - } - - return keys.reduce( - (result, key) => { - if (!key.tableName) { - result.columns.push(getKeyWithAlias(key)); - - return result; - } - - let tableName = `\`${key.tableName}\``; - - if (!result.tables.includes(tableName)) { - result.tables.push(tableName); - } - - result.columns.push({ statement: `${tableName}.${getKeyWithAlias(key)}`, isActivated: key.isActivated }); - - return result; - }, - { - tables: [], - columns: [], - }, - ); - }; - - const getCharacteristics = udfCharacteristics => { - const characteristics = []; - - if (udfCharacteristics.language) { - characteristics.push('LANGUAGE SQL'); - } - - if (udfCharacteristics.deterministic) { - characteristics.push(udfCharacteristics.deterministic); - } - - if (udfCharacteristics.sqlSecurity) { - characteristics.push(`SQL SECURITY ${udfCharacteristics.sqlSecurity}`); - } - - if (udfCharacteristics.comment) { - characteristics.push(`COMMENT ${wrap(escapeQuotes(udfCharacteristics.comment))}`); - } - - return characteristics; - }; +module.exports = ({ _, divideIntoActivatedAndDeactivated, commentIfDeactivated }) => { + const getFunctionArguments = functionArguments => { + return _.map(functionArguments, arg => { + const defaultExpression = arg.defaultExpression ? 
`DEFAULT ${arg.defaultExpression}` : ''; - const escapeQuotes = (str = '') => { - return str.replace(/(')/gi, '\\$1'); - }; - - return { - getTableName, - getTableOptions, - getPartitions, - getViewData, - getCharacteristics, - }; + return _.trim(`${arg.argumentMode} ${arg.argumentName || ''} ${arg.argumentType} ${defaultExpression}`); + }).join(', '); + }; + + const getNamePrefixedWithSchemaName = (name, schemaName) => { + if (schemaName) { + return `${wrapInQuotes(schemaName)}.${wrapInQuotes(name)}`; + } + + return wrapInQuotes(name); + }; + + const wrapInQuotes = name => (/\s/.test(name) ? `"${name}"` : name); + + const columnMapToString = ({ name }) => wrapInQuotes(name); + + const getColumnsList = (columns, isAllColumnsDeactivated, isParentActivated) => { + const dividedColumns = divideIntoActivatedAndDeactivated(columns, columnMapToString); + const deactivatedColumnsAsString = dividedColumns?.deactivatedItems?.length + ? commentIfDeactivated(dividedColumns.deactivatedItems.join(', '), { + isActivated: false, + isPartOfLine: true, + }) + : ''; + + return !isAllColumnsDeactivated && isParentActivated + ? 
' (' + dividedColumns.activatedItems.join(', ') + deactivatedColumnsAsString + ')' + : ' (' + columns.map(columnMapToString).join(', ') + ')'; + }; + + return { + getFunctionArguments, + getNamePrefixedWithSchemaName, + wrapInQuotes, + getColumnsList, + }; }; diff --git a/forward_engineering/helpers/keyHelper.js b/forward_engineering/helpers/keyHelper.js index 2547d89..26bf852 100644 --- a/forward_engineering/helpers/keyHelper.js +++ b/forward_engineering/helpers/keyHelper.js @@ -1,168 +1,158 @@ module.exports = (_, clean) => { - const mapProperties = (jsonSchema, iteratee) => { - return Object.entries(jsonSchema.properties).map(iteratee); - }; - - const isUniqueKey = column => { - if (column.compositeUniqueKey) { - return false; - } else if (!column.unique) { - return false; - } else { - return true; - } - }; - - const isInlineUnique = column => { - return isUniqueKey(column) && _.isEmpty(column.uniqueKeyOptions); - }; - - const isPrimaryKey = column => { - if (column.compositeUniqueKey) { - return false; - } else if (column.compositePrimaryKey) { - return false; - } else if (!column.primaryKey) { - return false; - } else { - return true; - } - }; - - const isInlinePrimaryKey = column => { - return isPrimaryKey(column) && _.isEmpty(column.primaryKeyOptions); - }; - - const getOrder = order => { - if (_.toLower(order) === 'asc') { - return 'ASC'; - } else if (_.toLower(order) === 'desc') { - return 'DESC'; - } else { - return ''; - } - }; - - const hydrateUniqueOptions = (options, columnName, isActivated) => - clean({ - keyType: 'UNIQUE', - name: options['constraintName'], - columns: [ - { - name: columnName, - order: getOrder(options['order']), - isActivated: isActivated, - }, - ], - category: options['indexCategory'], - ignore: options['indexIgnore'], - comment: options['indexComment'], - blockSize: options['indexBlockSize'], - }); - - const hydratePrimaryKeyOptions = (options, columnName, isActivated) => - clean({ - keyType: 'PRIMARY KEY', - name: 
options['constraintName'], - columns: [ - { - name: columnName, - order: getOrder(options['order']), - isActivated: isActivated, - }, - ], - category: options['indexCategory'], - ignore: options['indexIgnore'], - comment: options['indexComment'], - blockSize: options['indexBlockSize'], - }); - - const findName = (keyId, properties) => { - return Object.keys(properties).find(name => properties[name].GUID === keyId); - }; - - const checkIfActivated = (keyId, properties) => { - return _.get( - Object.values(properties).find(prop => prop.GUID === keyId), - 'isActivated', - true, - ); - }; - - const getKeys = (keys, jsonSchema) => { - return keys.map(key => { - return { - name: findName(key.keyId, jsonSchema.properties), - order: key.type === 'descending' ? 'DESC' : 'ASC', - isActivated: checkIfActivated(key.keyId, jsonSchema.properties), - }; - }); - }; - - const getCompositePrimaryKeys = jsonSchema => { - if (!Array.isArray(jsonSchema.primaryKey)) { - return []; - } - - return jsonSchema.primaryKey - .filter(primaryKey => !_.isEmpty(primaryKey.compositePrimaryKey)) - .map(primaryKey => ({ - ...hydratePrimaryKeyOptions(primaryKey), - columns: getKeys(primaryKey.compositePrimaryKey, jsonSchema), - })); - }; - - const getCompositeUniqueKeys = jsonSchema => { - if (!Array.isArray(jsonSchema.uniqueKey)) { - return []; - } - - return jsonSchema.uniqueKey - .filter(uniqueKey => !_.isEmpty(uniqueKey.compositeUniqueKey)) - .map(uniqueKey => ({ - ...hydrateUniqueOptions(uniqueKey), - columns: getKeys(uniqueKey.compositeUniqueKey, jsonSchema), - })); - }; - - const getTableKeyConstraints = ({ jsonSchema }) => { - if (!jsonSchema.properties) { - return []; - } - - const primaryKeyConstraints = mapProperties(jsonSchema, ([ name, schema ]) => { - if (!isPrimaryKey(schema)) { - return; - } else if (_.isEmpty(schema.primaryKeyOptions)) { - return; - } - - return hydratePrimaryKeyOptions(schema.primaryKeyOptions, name, schema.isActivated); - }).filter(Boolean); - - const 
uniqueKeyConstraints = _.flatten(mapProperties(jsonSchema, ([ name, schema ]) => { - if (!isUniqueKey(schema)) { - return []; - } else if (_.isEmpty(schema.uniqueKeyOptions) || !Array.isArray(schema.uniqueKeyOptions)) { - return []; - } - - return schema.uniqueKeyOptions.map(uniqueKey => ( - hydrateUniqueOptions(uniqueKey, name, schema.isActivated) - )); - })).filter(Boolean); - - return [ - ...primaryKeyConstraints, - ...getCompositePrimaryKeys(jsonSchema), - ...uniqueKeyConstraints, - ...getCompositeUniqueKeys(jsonSchema), - ]; - }; - - return { - getTableKeyConstraints, - isInlineUnique, - isInlinePrimaryKey, - }; + const mapProperties = (jsonSchema, iteratee) => { + return Object.entries(jsonSchema.properties).map(iteratee); + }; + + const isUniqueKey = column => { + if (column.compositeUniqueKey) { + return false; + } else if (!column.unique) { + return false; + } else { + return true; + } + }; + + const isInlineUnique = column => { + return isUniqueKey(column) && _.isEmpty(column.uniqueKeyOptions); + }; + + const isPrimaryKey = column => { + if (column.compositeUniqueKey) { + return false; + } else if (column.compositePrimaryKey) { + return false; + } else if (!column.primaryKey) { + return false; + } else { + return true; + } + }; + + const isInlinePrimaryKey = column => { + return isPrimaryKey(column) && _.isEmpty(column.primaryKeyOptions); + }; + + const hydrateUniqueOptions = (options, columnName, isActivated) => + clean({ + keyType: 'UNIQUE', + name: options['constraintName'], + columns: [ + { + name: columnName, + isActivated: isActivated, + }, + ], + include: options['include'], + storageParameters: options['indexStorageParameters'], + comment: options['indexComment'], + tablespace: options['indexTablespace'], + }); + + const hydratePrimaryKeyOptions = (options, columnName, isActivated) => + clean({ + keyType: 'PRIMARY KEY', + name: options['constraintName'], + columns: [ + { + name: columnName, + isActivated: isActivated, + }, + ], + include: 
options['include'], + storageParameters: options['indexStorageParameters'], + comment: options['indexComment'], + tablespace: options['indexTablespace'], + }); + + const findName = (keyId, properties) => { + return Object.keys(properties).find(name => properties[name].GUID === keyId); + }; + + const checkIfActivated = (keyId, properties) => { + return _.get( + Object.values(properties).find(prop => prop.GUID === keyId), + 'isActivated', + true + ); + }; + + const getKeys = (keys, jsonSchema) => { + return _.map(keys, key => { + return { + name: findName(key.keyId, jsonSchema.properties), + isActivated: checkIfActivated(key.keyId, jsonSchema.properties), + }; + }); + }; + + const getCompositePrimaryKeys = jsonSchema => { + if (!Array.isArray(jsonSchema.primaryKey)) { + return []; + } + + return jsonSchema.primaryKey + .filter(primaryKey => !_.isEmpty(primaryKey.compositePrimaryKey)) + .map(primaryKey => ({ + ...hydratePrimaryKeyOptions(primaryKey), + columns: getKeys(primaryKey.compositePrimaryKey, jsonSchema), + })); + }; + + const getCompositeUniqueKeys = jsonSchema => { + if (!Array.isArray(jsonSchema.uniqueKey)) { + return []; + } + + return jsonSchema.uniqueKey + .filter(uniqueKey => !_.isEmpty(uniqueKey.compositeUniqueKey)) + .map(uniqueKey => ({ + ...hydrateUniqueOptions(uniqueKey), + columns: getKeys(uniqueKey.compositeUniqueKey, jsonSchema), + })); + }; + + const getTableKeyConstraints = jsonSchema => { + if (!jsonSchema.properties) { + return []; + } + + const primaryKeyConstraints = mapProperties(jsonSchema, ([name, schema]) => { + if (!isPrimaryKey(schema)) { + return; + } else if (_.isEmpty(schema.primaryKeyOptions)) { + return; + } + + return hydratePrimaryKeyOptions(schema.primaryKeyOptions, name, schema.isActivated); + }).filter(Boolean); + + const uniqueKeyConstraints = _.flatten( + mapProperties(jsonSchema, ([name, schema]) => { + if (!isUniqueKey(schema)) { + return []; + } else if (_.isEmpty(schema.uniqueKeyOptions) || 
!Array.isArray(schema.uniqueKeyOptions)) { + return []; + } + + return schema.uniqueKeyOptions.map(uniqueKey => + hydrateUniqueOptions(uniqueKey, name, schema.isActivated) + ); + }) + ).filter(Boolean); + + return [ + ...primaryKeyConstraints, + ...getCompositePrimaryKeys(jsonSchema), + ...uniqueKeyConstraints, + ...getCompositeUniqueKeys(jsonSchema), + ]; + }; + + return { + getTableKeyConstraints, + isInlineUnique, + isInlinePrimaryKey, + getKeys, + }; }; diff --git a/forward_engineering/helpers/procedureHelper.js b/forward_engineering/helpers/procedureHelper.js new file mode 100644 index 0000000..6469d57 --- /dev/null +++ b/forward_engineering/helpers/procedureHelper.js @@ -0,0 +1,18 @@ +module.exports = ({ _, templates, assignTemplates, getFunctionArguments, getNamePrefixedWithSchemaName }) => { + const getProceduresScript = (schemaName, procedures) => { + return _.map(procedures, procedure => { + const orReplace = procedure.orReplace ? ' OR REPLACE' : ''; + + return assignTemplates(templates.createProcedure, { + name: getNamePrefixedWithSchemaName(procedure.name, schemaName), + orReplace: orReplace, + parameters: getFunctionArguments(procedure.inputArgs), + language: procedure.language, + body: procedure.body, + }); + }).join('\n'); + }; + return { + getProceduresScript, + }; +}; diff --git a/forward_engineering/helpers/tableHelper.js b/forward_engineering/helpers/tableHelper.js new file mode 100644 index 0000000..39bd99f --- /dev/null +++ b/forward_engineering/helpers/tableHelper.js @@ -0,0 +1,127 @@ +module.exports = ({ _, getColumnsList, checkAllKeysDeactivated }) => { + const getTableTemporaryValue = (temporary, unlogged) => { + if (temporary) { + return ' TEMPORARY'; + } + + if (unlogged) { + return ' UNLOGGED'; + } + + return ''; + }; + + const getTableOptions = tableData => { + const wrap = value => (value ? 
`${value}\n` : ''); + + const statements = [ + { key: 'inherits', getValue: getBasicValue('INHERITS') }, + { key: 'partitioning', getValue: getPartitioning }, + { key: 'usingMethod', getValue: getBasicValue('USING') }, + { key: 'on_commit', getValue: getOnCommit }, + { key: 'table_tablespace_name', getValue: getBasicValue('TABLESPACE') }, + { key: 'storage_parameter', getValue: getStorageParameters }, + { key: 'selectStatement', getValue: getBasicValue('AS') }, + ] + .map(config => wrap(config.getValue(tableData[config.key], tableData))) + .filter(Boolean) + .join(''); + + return _.trim(statements) ? ` ${_.trim(statements)}` : ''; + }; + + const getPartitioning = (value, { isActivated }) => { + if (value && value.partitionMethod) { + const expression = + value.partitionBy === 'keys' + ? getPartitionKeys(value, isActivated) + : ` (${value.partitioning_expression})`; + + return `PARTITION BY ${value.partitionMethod}${expression}`; + } + }; + + const getPartitionKeys = (value, isParentActivated) => { + const isAllColumnsDeactivated = checkAllKeysDeactivated(value.compositePartitionKey); + + return getColumnsList(value.compositePartitionKey, isAllColumnsDeactivated, isParentActivated); + }; + + const getOnCommit = (value, table) => { + if (value && table.temporary) { + return `ON COMMIT ${value}`; + } + }; + + const getBasicValue = prefix => value => { + if (value) { + return `${prefix} ${value}`; + } + }; + + const toastKeys = [ + 'toast_autovacuum_enabled', + 'toast_tuple_target', + 'toast_vacuum_index_cleanup', + 'toast_vacuum_truncate', + 'toast_autovacuum_vacuum_threshold', + 'toast_autovacuum_vacuum_scale_factor', + 'toast_autovacuum_vacuum_insert_threshold', + 'toast_autovacuum_vacuum_insert_scale_factor', + 'toast_autovacuum_vacuum_cost_delay', + 'toast_autovacuum_vacuum_cost_limit', + 'toast_autovacuum_freeze_min_age', + 'toast_autovacuum_freeze_max_age', + 'toast_autovacuum_freeze_table_age', + 'toast_autovacuum_multixact_freeze_min_age', + 
'toast_autovacuum_multixact_freeze_max_age', + 'toast_autovacuum_multixact_freeze_table_age', + 'toast_log_autovacuum_min_duration', + ]; + + const getStorageParameters = value => { + if (_.isEmpty(value)) { + return ''; + } + + const keysToSkip = ['autovacuum', 'toast', 'id']; + + return _.chain(value) + .thru(value => (value.autovacuum_enabled ? value : _.omit(value, 'autovacuum'))) + .thru(value => (value.toast_autovacuum_enabled ? value : _.omit(value, 'toast'))) + .toPairs() + .flatMap(([key, value]) => { + if (key === 'autovacuum' || key === 'toast') { + return _.toPairs(value); + } + + return [[key, value]]; + }) + .reject(([key]) => _.includes(keysToSkip, key)) + .map(([key, value]) => { + if (!value && value !== 0) { + return; + } + + if (_.includes(toastKeys, key)) { + return `toast.${key.slice('toast_'.length)}=${value}`; + } + + return `${key}=${value}`; + }) + .compact() + .join(',\n\t') + .trim() + .thru(storageParameters => { + if (storageParameters) { + return `WITH (\n\t${storageParameters}\n)`; + } + }) + .value(); + }; + + return { + getTableTemporaryValue, + getTableOptions, + }; +}; diff --git a/forward_engineering/helpers/udtHelper.js b/forward_engineering/helpers/udtHelper.js new file mode 100644 index 0000000..0a5102a --- /dev/null +++ b/forward_engineering/helpers/udtHelper.js @@ -0,0 +1,65 @@ +module.exports = ({ _, commentIfDeactivated, assignTemplates, templates, getNamePrefixedWithSchemaName }) => { + const getPlainUdt = (udt, columns) => { + const udtName = getNamePrefixedWithSchemaName(udt.name, udt.databaseName); + const comment = assignTemplates(templates.comment, { + object: 'TYPE', + objectName: udtName, + comment: udt.comment, + }); + + switch (udt.type) { + case 'composite': + return assignTemplates(templates.createCompositeType, { + name: udtName, + columnDefinitions: _.join(columns, ',\n\t'), + comment: udt.comment ? 
comment : '', + }); + case 'enum': + return assignTemplates(templates.createEnumType, { + name: udtName, + values: _.map(udt.enum, value => `'${value}'`).join(', '), + comment: udt.comment ? comment : '', + }); + case 'range_udt': + return assignTemplates(templates.createRangeType, { + name: udtName, + subtype: udt.rangeSubtype, + options: getRangeOptions(udt), + comment: udt.comment ? comment : '', + }); + default: + return ''; + } + }; + + const getRangeOptions = udt => { + const wrap = value => (value ? `\t${value}` : ''); + + const statements = [ + { key: 'operatorClass', getValue: getBasicValue('SUBTYPE_OPCLASS') }, + { key: 'collation', getValue: getBasicValue('COLLATION') }, + { key: 'canonicalFunction', getValue: getBasicValue('CANONICAL') }, + { key: 'subtypeDiffFunction', getValue: getBasicValue('SUBTYPE_DIFF') }, + { key: 'multiRangeType', getValue: getBasicValue('MULTIRANGE_TYPE_NAME') }, + ] + .map(config => wrap(config.getValue(udt[config.key]))) + .filter(Boolean) + .join(',\n'); + + return _.trim(statements) ? 
',\n\t' + _.trim(statements) : ''; + }; + + const getBasicValue = prefix => value => { + if (value) { + return `${prefix}=${value}`; + } + }; + + const getUserDefinedType = (udt, columns) => { + return commentIfDeactivated(getPlainUdt(udt, columns), { + isActivated: udt.isActivated, + }); + }; + + return { getUserDefinedType }; +}; From 45bd89047a082de1f8de9b16dd5d51899eeb2ca9 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 16:51:41 +0300 Subject: [PATCH 33/69] FE: added FE of table indexes --- forward_engineering/configs/templates.js | 4 +- forward_engineering/ddlProvider.js | 34 +++++++- forward_engineering/helpers/general.js | 6 +- forward_engineering/helpers/indeexHelper.js | 87 +++++++++++++++++++++ 4 files changed, 125 insertions(+), 6 deletions(-) create mode 100644 forward_engineering/helpers/indeexHelper.js diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index 91ba782..0869688 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -20,8 +20,8 @@ module.exports = { 'ALTER TABLE IF EXISTS ${foreignTable} ADD CONSTRAINT ${name} FOREIGN KEY (${foreignKey}) REFERENCES ${primaryTable}(${primaryKey});', index: - 'CREATE ${indexType}INDEX ${ifNotExist}${name}${indexCategory}\n' + - '\tON ${table} ( ${keys} )${indexOptions};\n', + 'CREATE${unique} INDEX${concurrently}${ifNotExist} ${name}\n' + + ' ON${only} ${tableName}${using}${keys}${options};\n', createView: 'CREATE ${orReplace}${algorithm}${sqlSecurity}VIEW ${ifNotExist}${name} AS ${selectStatement}${checkOption};\n', diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 28deded..03e7712 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -55,6 +55,13 @@ module.exports = (baseProvider, options, app) => { getNamePrefixedWithSchemaName, }); + const { getIndexKeys, getIndexOptions } = require('./helpers/indeexHelper')({ + 
_, + wrapInQuotes, + checkAllKeysDeactivated, + getColumnsList, + }); + return { createDatabase({ databaseName, ifNotExist, comments, udfs, procedures }) { const comment = assignTemplates(templates.comment, { @@ -163,7 +170,32 @@ module.exports = (baseProvider, options, app) => { }, createIndex(tableName, index, dbData, isParentActivated = true) { - return ''; + const name = wrapInQuotes(index.indxName); + const unique = index.unique && index.index_method === 'btree' ? ' UNIQUE' : ''; + const concurrently = index.concurrently ? ' CONCURRENTLY' : ''; + const ifNotExist = index.ifNotExist ? ' IF NOT EXISTS' : ''; + const only = index.only ? ' ONLY' : ''; + const using = index.index_method ? ` USING ${_.toUpper(index.index_method)}` : ''; + + const keys = getIndexKeys(index.columns, isParentActivated); + const options = getIndexOptions(index, isParentActivated); + + return commentIfDeactivated( + assignTemplates(templates.index, { + unique, + concurrently, + ifNotExist, + name, + only, + using, + keys, + options, + tableName: getNamePrefixedWithSchemaName(tableName, dbData.databaseName), + }), + { + isActivated: index.isActivated, + } + ); }, createCheckConstraint(checkConstraint) { diff --git a/forward_engineering/helpers/general.js b/forward_engineering/helpers/general.js index d3ac934..9e32b4b 100644 --- a/forward_engineering/helpers/general.js +++ b/forward_engineering/helpers/general.js @@ -19,8 +19,8 @@ module.exports = ({ _, divideIntoActivatedAndDeactivated, commentIfDeactivated } const columnMapToString = ({ name }) => wrapInQuotes(name); - const getColumnsList = (columns, isAllColumnsDeactivated, isParentActivated) => { - const dividedColumns = divideIntoActivatedAndDeactivated(columns, columnMapToString); + const getColumnsList = (columns, isAllColumnsDeactivated, isParentActivated, mapColumn = columnMapToString) => { + const dividedColumns = divideIntoActivatedAndDeactivated(columns, mapColumn); const deactivatedColumnsAsString = 
dividedColumns?.deactivatedItems?.length ? commentIfDeactivated(dividedColumns.deactivatedItems.join(', '), { isActivated: false, @@ -30,7 +30,7 @@ module.exports = ({ _, divideIntoActivatedAndDeactivated, commentIfDeactivated } return !isAllColumnsDeactivated && isParentActivated ? ' (' + dividedColumns.activatedItems.join(', ') + deactivatedColumnsAsString + ')' - : ' (' + columns.map(columnMapToString).join(', ') + ')'; + : ' (' + columns.map(mapColumn).join(', ') + ')'; }; return { diff --git a/forward_engineering/helpers/indeexHelper.js b/forward_engineering/helpers/indeexHelper.js new file mode 100644 index 0000000..65e1c23 --- /dev/null +++ b/forward_engineering/helpers/indeexHelper.js @@ -0,0 +1,87 @@ +module.exports = ({ _, wrapInQuotes, checkAllKeysDeactivated, getColumnsList }) => { + const mapIndexKey = ({ name, sortOrder, nullsOrder, collation, opclass }) => { + const nullsOrderStr = nullsOrder ? ` ${nullsOrder}` : ''; + const collationStr = collation ? ` COLLATE "${collation}"` : ''; + const opclassStr = opclass ? ` ${opclass}` : ''; + + return `${wrapInQuotes(name)}${collationStr}${opclassStr} ${sortOrder}${nullsOrderStr}`; + }; + + const getIndexKeys = (columns = [], isParentActivated) => { + const isAllColumnsDeactivated = checkAllKeysDeactivated(columns); + + return getColumnsList(columns, isAllColumnsDeactivated, isParentActivated, mapIndexKey); + }; + + const getIndexOptions = (index, isParentActivated) => { + const includeKeys = getColumnsList( + index.include || [], + checkAllKeysDeactivated(index.include || []), + isParentActivated + ); + const include = _.trim(includeKeys) ? ` INCLUDE ${includeKeys}` : ''; + const withOptionsString = getWithOptions(index); + const withOptions = withOptionsString ? ` WITH (\n\t${withOptionsString})` : ''; + const tableSpace = index.index_tablespace_name ? ` TABLESPACE ${index.index_tablespace_name}` : ''; + const whereExpression = index.where ? 
` WHERE ${index.where}` : ''; + + return _.compact([' ', include, withOptions, tableSpace, whereExpression]).join('\n'); + }; + + const INDEX_STORAGE_OPTIONS_BY_METHOD = { + btree: { + index_fillfactor: 'fillfactor', + deduplicate_items: 'deduplicate_items', + }, + hash: { + index_fillfactor: 'fillfactor', + }, + spgist: { + index_fillfactor: 'fillfactor', + }, + gist: { + index_fillfactor: 'fillfactor', + index_buffering: 'buffering', + }, + gin: { + fastupdate: 'fastupdate', + gin_pending_list_limit: 'gin_pending_list_limit', + }, + brin: { + pages_per_range: 'pages_per_range', + autosummarize: 'autosummarize', + }, + }; + + const getWithOptions = index => { + const config = INDEX_STORAGE_OPTIONS_BY_METHOD[index.index_method]; + + return _.chain(config) + .toPairs() + .map(([keyInModel, postgresKey]) => { + const value = index.index_storage_parameter[keyInModel]; + + if (_.isNil(value) || value === '') { + return; + } + + return `${postgresKey}=${getValue(value)}`; + }) + .compact() + .join(',\n\t') + .value(); + }; + + const getValue = value => { + if (_.isBoolean(value)) { + return value ? 'ON' : 'OFF'; + } + + return value; + }; + + return { + getIndexKeys, + getIndexOptions, + }; +}; From ee1d853e7501dba3e7d2f06f7b680678b1894463 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 17:41:06 +0300 Subject: [PATCH 34/69] Configs: fixed view options --- .../view_level/viewLevelConfig.json | 25 ++++++++++++++++--- .../helpers/postgresHelpers/viewHelper.js | 4 ++- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/properties_pane/view_level/viewLevelConfig.json b/properties_pane/view_level/viewLevelConfig.json index 2a31af4..b489462 100644 --- a/properties_pane/view_level/viewLevelConfig.json +++ b/properties_pane/view_level/viewLevelConfig.json @@ -142,11 +142,28 @@ making sure that you maintain a proper JSON format. 
}, { "propertyName": "View option", - "propertyKeyword": "view_option", - "propertyType": "details", + "propertyKeyword": "viewOptions", + "propertyType": "block", "propertyTooltip": "This clause specifies optional parameters for a view", - "template": "textarea", - "markdown": false + "structure": [ + { + "propertyName": "Check testing scope", + "propertyKeyword": "check_option", + "propertyTooltip": "This option controls the behavior of automatically updatable views. When this option is specified, INSERT and UPDATE commands on the view will be checked to ensure that new rows satisfy the view-defining condition (that is, the new rows are checked to ensure that they are visible through the view). If they are not, the update will be rejected.", + "propertyType": "select", + "options": [ + "", + "local", + "cascaded" + ] + }, + { + "propertyName": "Security barrier", + "propertyKeyword": "security_barrier", + "propertyType": "checkbox", + "propertyTooltip": "This should be used if the view is intended to provide row-level security." 
+ } + ] }, { "propertyName": "As query", diff --git a/reverse_engineering/helpers/postgresHelpers/viewHelper.js b/reverse_engineering/helpers/postgresHelpers/viewHelper.js index 143d7ce..3c4b23f 100644 --- a/reverse_engineering/helpers/postgresHelpers/viewHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/viewHelper.js @@ -21,7 +21,7 @@ const prepareViewData = (viewData, viewOptions) => { const data = { withCheckOption: Boolean(viewData.check_option), checkTestingScope: getCheckTestingScope(viewData.check_option), - view_option: _.join(viewOptions?.view_options, ','), + viewOptions: _.fromPairs(_.map(viewOptions?.view_options, splitByEqualitySymbol)), temporary: viewOptions?.persistence === 't', recursive: isViewRecursive(viewData), }; @@ -41,6 +41,8 @@ const isViewRecursive = viewData => { return _.startsWith(_.trim(viewData.view_definition), 'WITH RECURSIVE'); }; +const splitByEqualitySymbol = item => _.split(item, '='); + module.exports = { setDependencies, isViewByTableType, From a53a0777eff51a165547e38e2518cc8bf432d870 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 17:53:44 +0300 Subject: [PATCH 35/69] RE: fixes view re fixes --- reverse_engineering/helpers/postgresHelpers/common.js | 2 +- reverse_engineering/helpers/postgresHelpers/viewHelper.js | 3 ++- reverse_engineering/helpers/queryConstants.js | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/reverse_engineering/helpers/postgresHelpers/common.js b/reverse_engineering/helpers/postgresHelpers/common.js index 374fc32..5d79ca5 100644 --- a/reverse_engineering/helpers/postgresHelpers/common.js +++ b/reverse_engineering/helpers/postgresHelpers/common.js @@ -7,7 +7,7 @@ const setDependencies = app => { const clearEmptyPropertiesInObject = obj => _.chain(obj) .toPairs() - .filter(([key, value]) => Boolean(value)) + .reject(([key, value]) => _.isNil(value)) .fromPairs() .value(); diff --git a/reverse_engineering/helpers/postgresHelpers/viewHelper.js 
b/reverse_engineering/helpers/postgresHelpers/viewHelper.js index 3c4b23f..338af58 100644 --- a/reverse_engineering/helpers/postgresHelpers/viewHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/viewHelper.js @@ -19,11 +19,12 @@ const generateCreateViewScript = (viewName, viewData) => { const prepareViewData = (viewData, viewOptions) => { const data = { - withCheckOption: Boolean(viewData.check_option), + withCheckOption: viewData.check_option !== 'NONE' || _.isNil(viewData.check_option), checkTestingScope: getCheckTestingScope(viewData.check_option), viewOptions: _.fromPairs(_.map(viewOptions?.view_options, splitByEqualitySymbol)), temporary: viewOptions?.persistence === 't', recursive: isViewRecursive(viewData), + description: viewOptions?.description, }; return clearEmptyPropertiesInObject(data); diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 18b476f..b184d7d 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -137,7 +137,8 @@ const queryConstants = { GET_VIEW_DATA: `SELECT * FROM information_schema.views WHERE table_name = $1 AND table_schema = $2;`, GET_VIEW_OPTIONS: ` SELECT reloptions AS view_options, - relpersistence AS persistence + relpersistence AS persistence, + obj_description(oid, 'pg_class') AS description FROM pg_catalog.pg_class WHERE relname = $1 AND relnamespace = $2;`, GET_FUNCTIONS_WITH_PROCEDURES: ` From f3bf65396782156447be7a33fb49b0c24a5c3ba4 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 17:54:06 +0300 Subject: [PATCH 36/69] FE: added views fe --- forward_engineering/configs/templates.js | 3 +- forward_engineering/ddlProvider.js | 98 ++++++++++++++++++++++-- forward_engineering/helpers/general.js | 46 +++++++++++ 3 files changed, 139 insertions(+), 8 deletions(-) diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index 0869688..4509028 
100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -23,8 +23,7 @@ module.exports = { 'CREATE${unique} INDEX${concurrently}${ifNotExist} ${name}\n' + ' ON${only} ${tableName}${using}${keys}${options};\n', - createView: - 'CREATE ${orReplace}${algorithm}${sqlSecurity}VIEW ${ifNotExist}${name} AS ${selectStatement}${checkOption};\n', + createView: 'CREATE${orReplace}${temporary} VIEW ${name}${withOptions}\nAS ${selectStatement}${checkOption};\n${comment}\n', viewSelectStatement: 'SELECT ${keys}\n\tFROM ${tableName}', diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 03e7712..19ac24a 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -3,12 +3,19 @@ const types = require('./configs/types'); const templates = require('./configs/templates'); module.exports = (baseProvider, options, app) => { - const { commentIfDeactivated, checkAllKeysDeactivated, divideIntoActivatedAndDeactivated, hasType, wrap, clean } = - app.utils.general; + const { + tab, + commentIfDeactivated, + checkAllKeysDeactivated, + divideIntoActivatedAndDeactivated, + hasType, + wrap, + clean, + } = app.utils.general; const assignTemplates = app.utils.assignTemplates; const _ = app.require('lodash'); const { decorateType, decorateDefault } = require('./helpers/columnDefinitionHelper')(_, wrap); - const { getFunctionArguments, wrapInQuotes, getNamePrefixedWithSchemaName, getColumnsList } = + const { getFunctionArguments, wrapInQuotes, getNamePrefixedWithSchemaName, getColumnsList, getViewData } = require('./helpers/general')({ _, divideIntoActivatedAndDeactivated, @@ -277,7 +284,68 @@ module.exports = (baseProvider, options, app) => { }, createView(viewData, dbData, isActivated) { - return ''; + const viewName = getNamePrefixedWithSchemaName(viewData.name, dbData.databaseName); + + const comment = assignTemplates(templates.comment, { + object: 'VIEW', + objectName: viewName, + 
comment: viewData.comment, + }); + + const allDeactivated = checkAllKeysDeactivated(viewData.keys || []); + const deactivatedWholeStatement = allDeactivated || !isActivated; + const { columns, tables } = getViewData(viewData.keys, dbData); + let columnsAsString = columns.map(column => column.statement).join(',\n\t\t'); + + if (!deactivatedWholeStatement) { + const dividedColumns = divideIntoActivatedAndDeactivated(columns, column => column.statement); + const deactivatedColumnsString = dividedColumns.deactivatedItems.length + ? commentIfDeactivated(dividedColumns.deactivatedItems.join(',\n\t\t'), { + isActivated: false, + isPartOfLine: true, + }) + : ''; + columnsAsString = dividedColumns.activatedItems.join(',\n\t\t') + deactivatedColumnsString; + } + + const selectStatement = _.trim(viewData.selectStatement) + ? _.trim(tab(viewData.selectStatement)) + : assignTemplates(templates.viewSelectStatement, { + tableName: tables.join(', '), + keys: columnsAsString, + }); + + const check_option = viewData.viewOptions?.check_option + ? `check_option=${viewData.viewOptions?.check_option}` + : ''; + const security_barrier = viewData.viewOptions?.security_barrier ? `security_barrier` : ''; + const withOptions = + check_option || security_barrier + ? `\n\tWITH (${_.compact([check_option, security_barrier]).join(',')})` + : ''; + + const getCheckOption = viewData => { + if (viewData.withCheckOption && viewData.checkTestingScope) { + return `\n\tWITH ${viewData.checkTestingScope} CHECK OPTION`; + } else if (viewData.withCheckOption) { + return '\n\tWITH CHECK OPTION'; + } else { + return ''; + } + }; + + return commentIfDeactivated( + assignTemplates(templates.createView, { + name: viewName, + orReplace: viewData.orReplace ? ' OR REPLACE' : '', + temporary: viewData.temporary ? ' TEMPORARY' : '', + checkOption: getCheckOption(viewData), + comment: viewData.comment ? 
comment : '', + withOptions, + selectStatement, + }), + { isActivated: !deactivatedWholeStatement } + ); }, createViewIndex(viewName, index, dbData, isParentActivated) { @@ -393,11 +461,29 @@ module.exports = (baseProvider, options, app) => { }, hydrateViewColumn(data) { - return ''; + return { + name: data.name, + tableName: data.entityName, + alias: data.alias, + isActivated: data.isActivated, + }; }, hydrateView({ viewData, entityData, relatedSchemas, relatedContainers }) { - return ''; + const detailsTab = entityData[0]; + + return { + name: viewData.name, + keys: viewData.keys, + comment: detailsTab.description, + orReplace: detailsTab.orReplace, + temporary: detailsTab.temporary, + recursive: detailsTab.recursive, + viewOptions: detailsTab.viewOptions, + selectStatement: detailsTab.selectStatement, + withCheckOption: detailsTab.withCheckOption, + checkTestingScope: detailsTab.withCheckOption ? detailsTab.checkTestingScope : '', + }; }, commentIfDeactivated(statement, data, isPartOfLine) { diff --git a/forward_engineering/helpers/general.js b/forward_engineering/helpers/general.js index 9e32b4b..5243353 100644 --- a/forward_engineering/helpers/general.js +++ b/forward_engineering/helpers/general.js @@ -33,10 +33,56 @@ module.exports = ({ _, divideIntoActivatedAndDeactivated, commentIfDeactivated } : ' (' + columns.map(mapColumn).join(', ') + ')'; }; + const getKeyWithAlias = key => { + if (!key) { + return ''; + } + + if (key.alias) { + return `${wrapInQuotes(key.name)} as ${wrapInQuotes(key.alias)}`; + } else { + return wrapInQuotes(key.name); + } + }; + + const getViewData = keys => { + if (!Array.isArray(keys)) { + return { tables: [], columns: [] }; + } + + return keys.reduce( + (result, key) => { + if (!key.tableName) { + result.columns.push(getKeyWithAlias(key)); + + return result; + } + + let tableName = wrapInQuotes(key.tableName); + + if (!result.tables.includes(tableName)) { + result.tables.push(tableName); + } + + result.columns.push({ + statement: 
`${tableName}.${getKeyWithAlias(key)}`, + isActivated: key.isActivated, + }); + + return result; + }, + { + tables: [], + columns: [], + } + ); + }; + return { getFunctionArguments, getNamePrefixedWithSchemaName, wrapInQuotes, getColumnsList, + getViewData, }; }; From 32c6c933978cf5a0c21d0afda5d2fc6978bc9c24 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 11 Oct 2021 18:10:34 +0300 Subject: [PATCH 37/69] FE: added FE of comments on table columns --- forward_engineering/configs/templates.js | 5 +++-- forward_engineering/ddlProvider.js | 20 +++++++++++++++---- .../helpers/columnDefinitionHelper.js | 19 +++++++++++++++++- .../{indeexHelper.js => indexHelper.js} | 0 4 files changed, 37 insertions(+), 7 deletions(-) rename forward_engineering/helpers/{indeexHelper.js => indexHelper.js} (100%) diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index 4509028..828bd5d 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -6,7 +6,7 @@ module.exports = { createTable: 'CREATE${temporary} TABLE${ifNotExist} ${name} (\n' + '${columnDefinitions}${keyConstraints}${checkConstraints}${foreignKeyConstraints}\n' + - ')${options};\n${comment}\n', + ')${options};\n${comment}${columnDescriptions}\n', columnDefinition: '${name} ${type}${collation}${primaryKey}${uniqueKey}${defaultValue}${notNull}', @@ -23,7 +23,8 @@ module.exports = { 'CREATE${unique} INDEX${concurrently}${ifNotExist} ${name}\n' + ' ON${only} ${tableName}${using}${keys}${options};\n', - createView: 'CREATE${orReplace}${temporary} VIEW ${name}${withOptions}\nAS ${selectStatement}${checkOption};\n${comment}\n', + createView: + 'CREATE${orReplace}${temporary} VIEW ${name}${withOptions}\nAS ${selectStatement}${checkOption};\n${comment}\n', viewSelectStatement: 'SELECT ${keys}\n\tFROM ${tableName}', diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 19ac24a..7efa72f 100644 --- 
a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -14,7 +14,6 @@ module.exports = (baseProvider, options, app) => { } = app.utils.general; const assignTemplates = app.utils.assignTemplates; const _ = app.require('lodash'); - const { decorateType, decorateDefault } = require('./helpers/columnDefinitionHelper')(_, wrap); const { getFunctionArguments, wrapInQuotes, getNamePrefixedWithSchemaName, getColumnsList, getViewData } = require('./helpers/general')({ _, @@ -62,13 +61,22 @@ module.exports = (baseProvider, options, app) => { getNamePrefixedWithSchemaName, }); - const { getIndexKeys, getIndexOptions } = require('./helpers/indeexHelper')({ + const { getIndexKeys, getIndexOptions } = require('./helpers/indexHelper')({ _, wrapInQuotes, checkAllKeysDeactivated, getColumnsList, }); + const { decorateType, decorateDefault, getColumnComments } = require('./helpers/columnDefinitionHelper')({ + _, + wrap, + assignTemplates, + templates, + commentIfDeactivated, + getNamePrefixedWithSchemaName, + }); + return { createDatabase({ databaseName, ifNotExist, comments, udfs, procedures }) { const comment = assignTemplates(templates.comment, { @@ -113,9 +121,10 @@ module.exports = (baseProvider, options, app) => { }, isActivated ) { + const tableName = getNamePrefixedWithSchemaName(name, dbData.databaseName); const comment = assignTemplates(templates.comment, { object: 'TABLE', - objectName: getNamePrefixedWithSchemaName(name, dbData.databaseName), + objectName: tableName, comment: description, }); @@ -128,10 +137,12 @@ module.exports = (baseProvider, options, app) => { const dividedForeignKeys = divideIntoActivatedAndDeactivated(foreignKeyConstraints, key => key.statement); const foreignKeyConstraintsString = generateConstraintsString(dividedForeignKeys, isActivated); + const columnDescriptions = '\n' + getColumnComments(tableName, columnDefinitions); + const tableStatement = assignTemplates(templates.createTable, { temporary: 
getTableTemporaryValue(temporary, unlogged), ifNotExist, - name: getNamePrefixedWithSchemaName(name, dbData.databaseName), + name: tableName, columnDefinitions: '\t' + _.join(columns, ',\n\t'), keyConstraints: keyConstraintsString, checkConstraints: !_.isEmpty(checkConstraints) ? ',\n\t' + _.join(checkConstraints, ',\n\t') : '', @@ -146,6 +157,7 @@ module.exports = (baseProvider, options, app) => { selectStatement, }), comment: description ? comment : '', + columnDescriptions, }); return tableStatement; diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index f9bc3cb..d4c630f 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -1,4 +1,4 @@ -module.exports = (_, wrap) => { +module.exports = ({ _, wrap, assignTemplates, templates, commentIfDeactivated, getNamePrefixedWithSchemaName }) => { const addLength = (type, length) => { return `${type}(${length})`; }; @@ -77,9 +77,26 @@ module.exports = (_, wrap) => { } }; + const getColumnComments = (tableName, columnDefinitions) => { + return _.chain(columnDefinitions) + .filter('comment') + .map(columnData => { + const comment = assignTemplates(templates.comment, { + object: 'COLUMN', + objectName: getNamePrefixedWithSchemaName(columnData.name, tableName), + comment: columnData.comment, + }); + + return commentIfDeactivated(comment, columnData); + }) + .join('\n') + .value(); + }; + return { decorateType, isNumeric, decorateDefault, + getColumnComments, }; }; diff --git a/forward_engineering/helpers/indeexHelper.js b/forward_engineering/helpers/indexHelper.js similarity index 100% rename from forward_engineering/helpers/indeexHelper.js rename to forward_engineering/helpers/indexHelper.js From b4cb187a909875bda2a673bc3e8506e0cd04f28a Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 11:32:31 +0300 Subject: [PATCH 38/69] FE: fixed include property of indexes 
--- forward_engineering/helpers/indexHelper.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/forward_engineering/helpers/indexHelper.js b/forward_engineering/helpers/indexHelper.js index 65e1c23..7615cb4 100644 --- a/forward_engineering/helpers/indexHelper.js +++ b/forward_engineering/helpers/indexHelper.js @@ -19,7 +19,7 @@ module.exports = ({ _, wrapInQuotes, checkAllKeysDeactivated, getColumnsList }) checkAllKeysDeactivated(index.include || []), isParentActivated ); - const include = _.trim(includeKeys) ? ` INCLUDE ${includeKeys}` : ''; + const include = index.include?.length ? ` INCLUDE ${_.trim(includeKeys)}` : ''; const withOptionsString = getWithOptions(index); const withOptions = withOptionsString ? ` WITH (\n\t${withOptionsString})` : ''; const tableSpace = index.index_tablespace_name ? ` TABLESPACE ${index.index_tablespace_name}` : ''; From c8a0d188ae6efb83f99784dd16e3227676dec4a6 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 11:33:30 +0300 Subject: [PATCH 39/69] Added apply to instance --- forward_engineering/api.js | 40 ++++++-------- forward_engineering/applyToInstanceHelper.js | 52 ++++++------------- reverse_engineering/api.js | 17 +----- reverse_engineering/helpers/loggerHelper.js | 25 +++++++++ .../helpers/postgresService.js | 4 ++ reverse_engineering/helpers/queryConstants.js | 2 +- 6 files changed, 62 insertions(+), 78 deletions(-) create mode 100644 reverse_engineering/helpers/loggerHelper.js diff --git a/forward_engineering/api.js b/forward_engineering/api.js index 19130c5..5c875d6 100644 --- a/forward_engineering/api.js +++ b/forward_engineering/api.js @@ -1,29 +1,21 @@ const reApi = require('../reverse_engineering/api'); +const { createLogger } = require('../reverse_engineering/helpers/loggerHelper'); const applyToInstanceHelper = require('./applyToInstanceHelper'); module.exports = { - applyToInstance(connectionInfo, logger, callback, app) { - logger.clear(); - logger.log('info', connectionInfo, 
'connectionInfo', connectionInfo.hiddenKeys); + applyToInstance(connectionInfo, logger, callback, app) { + logger.clear(); + logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); - applyToInstanceHelper.applyToInstance(connectionInfo, logger, app) - .then(result => { - callback(null, result); - }) - .catch(error => { - const err = { - message: error.message, - stack: error.stack, - }; - logger.log('error', err, 'Error when applying to instance'); - callback(err); - }); - }, - testConnection(connectionInfo, logger, callback, app) { - reApi.testConnection(connectionInfo, logger, callback, app) - .then( - callback, - callback - ); - } -}; \ No newline at end of file + const postgresLogger = createLogger({ + title: 'Apply to instance', + hiddenKeys: connectionInfo.hiddenKeys, + logger, + }); + + applyToInstanceHelper.applyToInstance(connectionInfo, postgresLogger, app).then(callback, callback); + }, + testConnection(connectionInfo, logger, callback, app) { + reApi.testConnection(connectionInfo, logger, callback, app).then(callback, callback); + }, +}; diff --git a/forward_engineering/applyToInstanceHelper.js b/forward_engineering/applyToInstanceHelper.js index 83789da..be9feb6 100644 --- a/forward_engineering/applyToInstanceHelper.js +++ b/forward_engineering/applyToInstanceHelper.js @@ -1,43 +1,21 @@ -const connectionHelper = require('../reverse_engineering/helpers/connectionHelper'); - -const removeDelimiter = (statement) => { - const regExp = /delimiter (.*)/i; - - if (!regExp.test(statement)) { - return statement; - } - - const delimiter = statement.match(regExp)[1]; - const statementWithoutDelimiter = statement.replace(new RegExp(regExp, 'gi'), ''); - - return statementWithoutDelimiter.trim().replace(new RegExp(delimiter.split('').map(n => '\\' + n).join('') + '$'), ''); -}; +const postgresService = require('../reverse_engineering/helpers/postgresService'); const applyToInstance = async (connectionInfo, logger, app) => { - const _ = 
app.require('lodash'); - const async = app.require('async'); - const connection = connectionHelper.createInstance( - await connectionHelper.connect(connectionInfo), - logger, - ); - - try { - const queries = connectionInfo.script.split('\n\n').map((query) => { - return removeDelimiter(_.trim(query)); - }).filter(Boolean); - let i = 0; - - await async.mapSeries(queries, async query => { - const message = 'Query: ' + query.split('\n').shift().substr(0, 150); - logger.progress({ message }); - await connection.query(query); - }); - - } catch (e) { - connectionHelper.close(); - throw e; - } + try { + postgresService.setDependencies(app); + await postgresService.connect(connectionInfo, logger); + await postgresService.logVersion(); + await postgresService.applyScript(connectionInfo.script); + } catch (error) { + logger.error(error); + throw prepareError(error); + } +}; +const prepareError = error => { + error = JSON.stringify(error, Object.getOwnPropertyNames(error)); + error = JSON.parse(error); + return error; }; module.exports = { applyToInstance }; diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 5437b08..04dc6a4 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -1,5 +1,6 @@ 'use strict'; +const { createLogger } = require('./helpers/loggerHelper'); const postgresService = require('./helpers/postgresService'); module.exports = { @@ -166,22 +167,6 @@ module.exports = { }, }; -const createLogger = ({ title, logger, hiddenKeys }) => { - return { - info(message, additionalData = {}) { - logger.log('info', { message, ...additionalData }, title, hiddenKeys); - }, - - progress(message, dbName = '', tableName = '') { - logger.progress({ message, containerName: dbName, entityName: tableName }); - }, - - error(error) { - logger.log('error', prepareError(error), title); - }, - }; -}; - const prepareError = error => { error = JSON.stringify(error, Object.getOwnPropertyNames(error)); error = JSON.parse(error); diff --git 
a/reverse_engineering/helpers/loggerHelper.js b/reverse_engineering/helpers/loggerHelper.js new file mode 100644 index 0000000..6a425f5 --- /dev/null +++ b/reverse_engineering/helpers/loggerHelper.js @@ -0,0 +1,25 @@ +const createLogger = ({ title, logger, hiddenKeys }) => { + return { + info(message, additionalData = {}) { + logger.log('info', { message, ...additionalData }, title, hiddenKeys); + }, + + progress(message, dbName = '', tableName = '') { + logger.progress({ message, containerName: dbName, entityName: tableName }); + }, + + error(error) { + logger.log('error', prepareError(error), title); + }, + }; +}; + +const prepareError = error => { + error = JSON.stringify(error, Object.getOwnPropertyNames(error)); + error = JSON.parse(error); + return error; +}; + +module.exports = { + createLogger, +}; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 32675d6..7a45f14 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -83,6 +83,10 @@ module.exports = { return db.query(queryConstants.PING); }, + applyScript(script) { + return db.query(script); + }, + async logVersion() { const versionRow = await db.queryTolerant(queryConstants.GET_VERSION, [], true); const version = versionRow?.version || 'Version not retrieved'; diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index b184d7d..1364f24 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -228,7 +228,7 @@ const getQueryName = query => { const queryEntry = Object.entries(queryConstants).find(([queryName, constantQuery]) => query === constantQuery) || []; - return queryEntry[0]; + return queryEntry[0] || 'Custom query'; }; module.exports = { From 241b860ca5140603558990291b6dd299c8528abe Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 13:58:11 +0300 
Subject: [PATCH 40/69] Added support for tsvector and tsquery. Fixed RE of these properties --- .../container_level/containerLevelConfig.json | 1 - .../field_level/fieldLevelConfig.json | 14 ++++++-- .../helpers/postgresHelpers/columnHelper.js | 3 ++ .../helpers/postgresHelpers/tableHelper.js | 32 ++++++++----------- 4 files changed, 28 insertions(+), 22 deletions(-) diff --git a/properties_pane/container_level/containerLevelConfig.json b/properties_pane/container_level/containerLevelConfig.json index 20c54b7..fc74841 100644 --- a/properties_pane/container_level/containerLevelConfig.json +++ b/properties_pane/container_level/containerLevelConfig.json @@ -323,7 +323,6 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "minValue": 0, - "defaultValue": 1000, "step": 1, "propertyTooltip": "A positive number giving the estimated number of rows that the planner should expect the function to return.", "dependency": { diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index ea045ce..3ec5e01 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -130,7 +130,9 @@ making sure that you maintain a proper JSON format. "varchar", "text", "bit", - "varbit" + "varbit", + "tsvector", + "tsquery" ], "data": "options", "valueType": "string", @@ -185,7 +187,7 @@ making sure that you maintain a proper JSON format. "varchar", "text" ] - } + }, }, { "propertyName": "Not null", @@ -514,7 +516,13 @@ making sure that you maintain a proper JSON format. 
"propertyType": "numeric", "valueType": "number", "allowNegative": false, - "typeDecorator": true + "typeDecorator": true, + "dependency": { + "key": "mode", + "value": [ + "numeric" + ] + } }, { "propertyName": "Scale", diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index 2dbdf5b..60d40e3 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -92,6 +92,8 @@ const mapType = (userDefinedTypes, type) => { case 'bit': case 'char': case 'text': + case 'tsvector': + case 'tsquery': return { type: 'char', mode: type }; case 'bit varying': return { type: 'char', mode: 'varbit' }; @@ -167,6 +169,7 @@ const mapType = (userDefinedTypes, type) => { case 'regrole': case 'regtype': return { type: 'oid', mode: type }; + default: { if (_.some(userDefinedTypes, { name: type })) { return { $ref: `#/definitions/${type}` }; diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index f4b633f..5270341 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -79,10 +79,7 @@ const prepareTablePartition = (partitionResult, tableColumns) => { const key = isExpression ? 'partitioning_expression' : 'compositePartitionKey'; const value = isExpression ? 
getPartitionExpression(partitionResult, tableColumns) - : _.map( - partitionResult.partition_attributes_positions, - getColumnNameByPosition(tableColumns) - ); + : _.map(partitionResult.partition_attributes_positions, getColumnNameByPosition(tableColumns)); return [ { @@ -127,7 +124,6 @@ const getPartitionExpression = (partitionResult, tableColumns) => { .value(); }; - const splitByEqualitySymbol = item => _.split(item, '='); const checkHaveJsonTypes = columns => { @@ -209,7 +205,7 @@ const getCheckConstraint = constraint => { }; }; -const prepareTableIndexes = (tableIndexesResult) => { +const prepareTableIndexes = tableIndexesResult => { return _.map(tableIndexesResult, indexData => { const index = { indxName: indexData.indexname, @@ -217,10 +213,10 @@ const prepareTableIndexes = (tableIndexesResult) => { unique: indexData.index_unique ?? false, columns: mapIndexColumns(indexData), index_tablespace_name: indexData.tablespace_name, - index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters) + index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters), }; - return clearEmptyPropertiesInObject(index) + return clearEmptyPropertiesInObject(index); }); }; @@ -254,9 +250,9 @@ const getNullsOrder = nulls_first => { return nulls_first ? 
'NULLS FIRST' : 'NULLS LAST'; }; -const getIndexStorageParameters = (storageParameters) => { - if(!storageParameters) { - return null +const getIndexStorageParameters = storageParameters => { + if (!storageParameters) { + return null; } const params = _.fromPairs(_.map(storageParameters, param => splitByEqualitySymbol(param))); @@ -268,13 +264,13 @@ const getIndexStorageParameters = (storageParameters) => { fastupdate: params.fastupdate, gin_pending_list_limit: params.gin_pending_list_limit, pages_per_range: params.pages_per_range, - autosummarize: params.autosummarize - } + autosummarize: params.autosummarize, + }; - return clearEmptyPropertiesInObject(data) -} + return clearEmptyPropertiesInObject(data); +}; -const prepareTableLevelData = (tableLevelData) => { +const prepareTableLevelData = tableLevelData => { const temporary = tableLevelData?.relpersistence === 't'; const unlogged = tableLevelData?.relpersistence === 'u'; const storage_parameter = prepareStorageParameters(tableLevelData?.reloptions); @@ -285,8 +281,8 @@ const prepareTableLevelData = (tableLevelData) => { unlogged, storage_parameter, table_tablespace_name, - } -} + }; +}; module.exports = { prepareStorageParameters, From 35e9166b2ce572856c2008c7a4e2b81fcc2e8895 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 14:03:58 +0300 Subject: [PATCH 41/69] FE: fixed fe of functions and procedures, fixed fe of precision value for numeric types --- forward_engineering/api.js | 7 ++++++- forward_engineering/configs/templates.js | 4 ++-- forward_engineering/configs/types.js | 6 ++++++ forward_engineering/ddlProvider.js | 13 +++++++++---- .../helpers/columnDefinitionHelper.js | 17 +---------------- 5 files changed, 24 insertions(+), 23 deletions(-) diff --git a/forward_engineering/api.js b/forward_engineering/api.js index 5c875d6..630a9e8 100644 --- a/forward_engineering/api.js +++ b/forward_engineering/api.js @@ -5,7 +5,12 @@ const applyToInstanceHelper = require('./applyToInstanceHelper'); 
module.exports = { applyToInstance(connectionInfo, logger, callback, app) { logger.clear(); - logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); + logger.log( + 'info', + app.require('lodash').omit(connectionInfo, 'script', 'containerData'), + 'connectionInfo', + connectionInfo.hiddenKeys + ); const postgresLogger = createLogger({ title: 'Apply to instance', diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index 828bd5d..4709c80 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -34,10 +34,10 @@ module.exports = { '\tRETURNS ${returnType}\n' + '\tLANGUAGE ${language}\n' + '${properties}' + - 'AS ${definition};\n', + 'AS $BODY$\n${definition}\n$BODY$;\n', createProcedure: - 'CREATE${orReplace} PROCEDURE ${name} (${parameters})\n' + '\tLANGUAGE ${language}\n' + 'AS ${body};\n', + 'CREATE${orReplace} PROCEDURE ${name} (${parameters})\n' + '\tLANGUAGE ${language}\n' + 'AS $BODY$\n${body}\n$BODY$;\n', createCompositeType: 'CREATE TYPE ${name} AS (\n\t${columnDefinitions}\n);\n${comment}\n', createEnumType: 'CREATE TYPE ${name} AS ENUM (${values});\n${comment}\n', diff --git a/forward_engineering/configs/types.js b/forward_engineering/configs/types.js index d90b499..25a62da 100644 --- a/forward_engineering/configs/types.js +++ b/forward_engineering/configs/types.js @@ -16,6 +16,12 @@ module.exports = { size: 1, mode: 'varying', }, + "tsvector": { + mode: 'text' + }, + "tsquery": { + mode: 'text' + }, smallint: { capacity: 2, }, diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 7efa72f..4d8ef16 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -91,10 +91,7 @@ module.exports = (baseProvider, options, app) => { comment: comments ? 
comment : '', }); - const createFunctionStatement = getFunctionsScript(databaseName, udfs); - const createProceduresStatement = getProceduresScript(databaseName, procedures); - - return _.trim([schemaStatement, createFunctionStatement, createProceduresStatement].join('\n\n')); + return schemaStatement; }, createTable( @@ -370,6 +367,14 @@ module.exports = (baseProvider, options, app) => { return getUserDefinedType(udt, columns); }, + createFunctions({ databaseName, udfs }) { + return getFunctionsScript(databaseName, udfs); + }, + + createProcedures({ databaseName, procedures }) { + return getProceduresScript(databaseName, procedures); + }, + getDefaultType(type) { return defaultTypes[type]; }, diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index d4c630f..5337ef9 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -27,22 +27,8 @@ module.exports = ({ _, wrap, assignTemplates, templates, commentIfDeactivated, g return type; }; - const isNumeric = type => - [ - 'smallint', - 'integer', - 'bigint', - 'numeric', - 'real', - 'double precision', - 'smallserial', - 'serial', - 'bigserial', - 'money', - ].includes(type); - const canHaveLength = type => ['char', 'varchar', 'bit', 'varbit'].includes(type); - const canHavePrecision = type => isNumeric(type); + const canHavePrecision = type => type === 'numeric'; const canHaveTimePrecision = type => ['time', 'timestamp'].includes(type); const canHaveScale = type => type === 'numeric'; @@ -95,7 +81,6 @@ module.exports = ({ _, wrap, assignTemplates, templates, commentIfDeactivated, g return { decorateType, - isNumeric, decorateDefault, getColumnComments, }; From ff6d95f0024fd230320c166c154b2ab265487260 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 14:04:08 +0300 Subject: [PATCH 42/69] Remove trailing comma --- 
properties_pane/field_level/fieldLevelConfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 3ec5e01..67528ae 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -187,7 +187,7 @@ making sure that you maintain a proper JSON format. "varchar", "text" ] - }, + } }, { "propertyName": "Not null", From 8993782cadcaf44b74ddc99b229c57b7ba05c0ca Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 15:02:59 +0300 Subject: [PATCH 43/69] FIxed indexes to properly work with different index_method, fixed fe and re accordingly to new config --- forward_engineering/ddlProvider.js | 5 +++- forward_engineering/helpers/indexHelper.js | 6 ++-- .../container_level/containerLevelConfig.json | 2 +- .../entity_level/entityLevelConfig.json | 30 +++++++++++++++++++ .../helpers/postgresHelpers/tableHelper.js | 12 ++++++-- reverse_engineering/helpers/queryConstants.js | 4 +-- 6 files changed, 50 insertions(+), 9 deletions(-) diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 4d8ef16..e98b8f7 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -193,7 +193,10 @@ module.exports = (baseProvider, options, app) => { const only = index.only ? ' ONLY' : ''; const using = index.index_method ? ` USING ${_.toUpper(index.index_method)}` : ''; - const keys = getIndexKeys(index.columns, isParentActivated); + const keys = getIndexKeys( + index.index_method === 'btree' ? 
index.btree_columns : index.columns, + isParentActivated + ); const options = getIndexOptions(index, isParentActivated); return commentIfDeactivated( diff --git a/forward_engineering/helpers/indexHelper.js b/forward_engineering/helpers/indexHelper.js index 7615cb4..3981eee 100644 --- a/forward_engineering/helpers/indexHelper.js +++ b/forward_engineering/helpers/indexHelper.js @@ -1,10 +1,12 @@ module.exports = ({ _, wrapInQuotes, checkAllKeysDeactivated, getColumnsList }) => { const mapIndexKey = ({ name, sortOrder, nullsOrder, collation, opclass }) => { + const sortOrderStr = sortOrder ? ` ${sortOrder}` : ''; const nullsOrderStr = nullsOrder ? ` ${nullsOrder}` : ''; - const collationStr = collation ? ` COLLATE "${collation}"` : ''; + const collate = _.includes(collation, '"') ? collation : `"${collation}"`; + const collationStr = collation ? ` COLLATE ${collate}` : ''; const opclassStr = opclass ? ` ${opclass}` : ''; - return `${wrapInQuotes(name)}${collationStr}${opclassStr} ${sortOrder}${nullsOrderStr}`; + return `${wrapInQuotes(name)}${collationStr}${opclassStr}${sortOrderStr}${nullsOrderStr}`; }; const getIndexKeys = (columns = [], isParentActivated) => { diff --git a/properties_pane/container_level/containerLevelConfig.json b/properties_pane/container_level/containerLevelConfig.json index fc74841..9ca1265 100644 --- a/properties_pane/container_level/containerLevelConfig.json +++ b/properties_pane/container_level/containerLevelConfig.json @@ -219,7 +219,7 @@ making sure that you maintain a proper JSON format. }, { "key": "functionReturnsSetOf", - "exists": false + "exist": false } ] } diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 1089b54..3ffcdde 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -821,6 +821,32 @@ making sure that you maintain a proper JSON format. 
"propertyType": "fieldList", "template": "orderedList", "propertyTooltip": "The name of a column of the table.", + "attributeList": { + "collation": { + "propertyType": "text", + "placeholder": "Collation", + "propertyTooltip": "The name of the collation to use for the index." + }, + "opclass": { + "propertyType": "text", + "placeholder": "Opclass", + "propertyTooltip": "The name of an operator class." + } + }, + "dependency": { + "type": "not", + "values": { + "key": "index_method", + "value": "btree" + } + } + }, + { + "propertyName": "Columns", + "propertyKeyword": "btree_columns", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The name of a column of the table.", "attributeList": { "sortOrder": { "propertyType": "select", @@ -849,6 +875,10 @@ making sure that you maintain a proper JSON format. "placeholder": "Opclass", "propertyTooltip": "The name of an operator class." } + }, + "dependency": { + "key": "index_method", + "value": "btree" } }, { diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 5270341..264e69d 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -207,13 +207,17 @@ const getCheckConstraint = constraint => { const prepareTableIndexes = tableIndexesResult => { return _.map(tableIndexesResult, indexData => { + const columns = mapIndexColumns(indexData); + const index = { indxName: indexData.indexname, index_method: indexData.index_method, unique: indexData.index_unique ?? false, - columns: mapIndexColumns(indexData), index_tablespace_name: indexData.tablespace_name, index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters), + ...(indexData.index_method === 'btree' + ? 
{ btree_columns: columns } + : { columns: _.map(columns, column => _.omit(column, 'sortOrder', 'nullsOrder')) }), }; return clearEmptyPropertiesInObject(index); @@ -227,15 +231,17 @@ const mapIndexColumns = indexData => { return; } - const sortOrder = _.get(indexData, `ascending.${itemIndex}`, false) ? 'ASC' : 'DESC'; + const sortOrder = _.get(indexData, `ascendings.${itemIndex}`, false) ? 'ASC' : 'DESC'; const nullsOrder = getNullsOrder(_.get(indexData, `nulls_first.${itemIndex}`)); - const opclass = _.get(indexData, `opclasses.${itemIndex}`); + const opclass = _.get(indexData, `opclasses.${itemIndex}`, ''); + const collation = _.get(indexData, `collations.${itemIndex}`, ''); return { name: columnName, sortOrder, nullsOrder, opclass, + collation, }; }) .compact() diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 1364f24..886c1c8 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -89,10 +89,10 @@ const queryConstants = { WHEN opclass_t.opcname is not null THEN format('%I.%I',opclas_namespace.nspname,opclass_t.opcname) END AS opclass, CASE - WHEN indexes.ord > 0 THEN pg_catalog.pg_index_column_has_property(indexes.indexrelid, indexes.key, 'asc') + WHEN indexes.ord > 0 THEN pg_catalog.pg_index_column_has_property(indexes.indexrelid, indexes.ord, 'asc') END AS ascending, CASE - WHEN indexes.ord > 0 THEN pg_catalog.pg_index_column_has_property(indexes.indexrelid, indexes.key, 'nulls_first') + WHEN indexes.ord > 0 THEN pg_catalog.pg_index_column_has_property(indexes.indexrelid, indexes.ord, 'nulls_first') END AS nulls_first, pg_catalog.pg_get_indexdef(indexes.indexrelid, ord, false) AS expression FROM From 8593dda824a7a83a265b2054ac50a3799e67626a Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 15:41:37 +0300 Subject: [PATCH 44/69] Fixed re and fe of functions --- forward_engineering/helpers/functionHelper.js | 2 +- 
.../container_level/containerLevelConfig.json | 15 +-------------- .../entity_level/entityLevelConfig.json | 3 ++- .../helpers/postgresHelpers/functionHelper.js | 2 +- reverse_engineering/helpers/postgresService.js | 4 +++- 5 files changed, 8 insertions(+), 18 deletions(-) diff --git a/forward_engineering/helpers/functionHelper.js b/forward_engineering/helpers/functionHelper.js index 3cbb2fb..b44ad9e 100644 --- a/forward_engineering/helpers/functionHelper.js +++ b/forward_engineering/helpers/functionHelper.js @@ -7,7 +7,7 @@ module.exports = ({ _, templates, assignTemplates, getFunctionArguments, getName name: getNamePrefixedWithSchemaName(udf.name, schemaName), orReplace: orReplace, parameters: getFunctionArguments(udf.functionArguments), - returnType: udf.functionReturnsSetOf ? 'SETOF' : udf.functionReturnType, + returnType: udf.functionReturnsSetOf ? `SETOF ${udf.functionReturnType}` : udf.functionReturnType, language: udf.functionLanguage, properties: getProperties(udf), definition: udf.functionDefinition, diff --git a/properties_pane/container_level/containerLevelConfig.json b/properties_pane/container_level/containerLevelConfig.json index 9ca1265..978983c 100644 --- a/properties_pane/container_level/containerLevelConfig.json +++ b/properties_pane/container_level/containerLevelConfig.json @@ -209,20 +209,7 @@ making sure that you maintain a proper JSON format. 
"propertyName": "Returns data type", "propertyKeyword": "functionReturnType", "propertyTooltip": "Any valid PostgreSQL data type", - "propertyType": "text", - "dependency": { - "type": "or", - "values": [ - { - "key": "functionReturnsSetOf", - "value": false - }, - { - "key": "functionReturnsSetOf", - "exist": false - } - ] - } + "propertyType": "text" }, { "propertyName": "Language", diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 3ffcdde..71aa440 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -139,7 +139,8 @@ making sure that you maintain a proper JSON format. "propertyName": "If not exists", "propertyKeyword": "ifNotExist", "propertyTooltip": "When the IF NOT EXISTS clause is used, MariaDB will return a warning instead of an error if the specified table already exists. Cannot be used together with the OR REPLACE clause.", - "propertyType": "checkbox" + "propertyType": "checkbox", + "defaultValue": true }, { "propertyName": "Inherits parent table", diff --git a/reverse_engineering/helpers/postgresHelpers/functionHelper.js b/reverse_engineering/helpers/postgresHelpers/functionHelper.js index 47faefb..673440b 100644 --- a/reverse_engineering/helpers/postgresHelpers/functionHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/functionHelper.js @@ -51,7 +51,7 @@ const mapFunctionData = (functionData, functionArgs, additionalData) => { name: functionData.name, functionDescription: additionalData?.description, functionArguments: mapFunctionArgs(functionArgs), - functionReturnsSetOf: additionalData?.returnsSet, + functionReturnsSetOf: additionalData?.returns_set, functionReturnType: functionData.return_data_type, functionLanguage: _.toLower(functionData.external_language), functionDefinition: functionData.routine_definition, diff --git a/reverse_engineering/helpers/postgresService.js 
b/reverse_engineering/helpers/postgresService.js index 7a45f14..24a4b91 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -159,7 +159,9 @@ module.exports = { const functionAdditionalData = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ADDITIONAL, [ schemaOid, ]); - const [functions, procedures] = _.partition(functionsWithProcedures, { routine_type: 'FUNCTION' }); + const [functions, procedures] = _.partition(_.filter(functionsWithProcedures, 'routine_type'), { + routine_type: 'FUNCTION', + }); const userDefinedFunctions = await mapPromises(functions, async functionData => { const functionArgs = await db.queryTolerant(queryConstants.GET_FUNCTIONS_WITH_PROCEDURES_ARGS, [ From 75a4fb407f10a0fc5fb2366ee4dab33020ac200b Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 15:46:45 +0300 Subject: [PATCH 45/69] Fixed ifNotExists for table, fixed ndents --- forward_engineering/configs/templates.js | 6 +++--- forward_engineering/ddlProvider.js | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index 4709c80..a57d230 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -4,9 +4,9 @@ module.exports = { comment: "COMMENT ON ${object} ${objectName} IS '${comment}';\n", createTable: - 'CREATE${temporary} TABLE${ifNotExist} ${name} (\n' + + '\nCREATE${temporary} TABLE${ifNotExist} ${name} (\n' + '${columnDefinitions}${keyConstraints}${checkConstraints}${foreignKeyConstraints}\n' + - ')${options};\n${comment}${columnDescriptions}\n', + ')${options};\n${comment}${columnDescriptions}', columnDefinition: '${name} ${type}${collation}${primaryKey}${uniqueKey}${defaultValue}${notNull}', @@ -21,7 +21,7 @@ module.exports = { index: 'CREATE${unique} INDEX${concurrently}${ifNotExist} ${name}\n' + - ' ON${only} 
${tableName}${using}${keys}${options};\n', + ' ON${only} ${tableName}${using}${keys}${options};\n\n', createView: 'CREATE${orReplace}${temporary} VIEW ${name}${withOptions}\nAS ${selectStatement}${checkOption};\n${comment}\n', diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index e98b8f7..5a09300 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -118,6 +118,7 @@ module.exports = (baseProvider, options, app) => { }, isActivated ) { + const ifNotExistStr = ifNotExist ? ' IF NOT EXISTS' : ''; const tableName = getNamePrefixedWithSchemaName(name, dbData.databaseName); const comment = assignTemplates(templates.comment, { object: 'TABLE', @@ -138,7 +139,7 @@ module.exports = (baseProvider, options, app) => { const tableStatement = assignTemplates(templates.createTable, { temporary: getTableTemporaryValue(temporary, unlogged), - ifNotExist, + ifNotExist: ifNotExistStr, name: tableName, columnDefinitions: '\t' + _.join(columns, ',\n\t'), keyConstraints: keyConstraintsString, From 79401b0a451832ec8449690fd4ca85005efdda26 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 16:34:13 +0300 Subject: [PATCH 46/69] RE: fixed RE of functions --- .../helpers/postgresHelpers/functionHelper.js | 14 +++++++++++++- reverse_engineering/helpers/queryConstants.js | 3 ++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/helpers/postgresHelpers/functionHelper.js b/reverse_engineering/helpers/postgresHelpers/functionHelper.js index 673440b..32357a0 100644 --- a/reverse_engineering/helpers/postgresHelpers/functionHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/functionHelper.js @@ -8,11 +8,23 @@ const mapFunctionArgs = args => { return _.map(args, arg => ({ argumentMode: arg.parameter_mode, argumentName: arg.parameter_name, - argumentType: arg.data_type, + argumentType: getArgType(arg.data_type, arg.udt_name), defaultExpression: arg.parameter_default, 
})); }; +const getArgType = (argType, argUdt) => { + if (argType === 'USER-DEFINED') { + return argUdt; + } + + if(argType === 'ARRAY') { + return argUdt.slice(1) + '[]' + } + + return argType; +}; + const getVolatility = volatility => { switch (volatility) { case 'i': diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 886c1c8..33f6720 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -155,7 +155,8 @@ const queryConstants = { SELECT parameter_name, parameter_mode, parameter_default, - data_type + data_type, + udt_name FROM information_schema.parameters WHERE specific_name = $1 ORDER BY ordinal_position;`, From cf1223aa67630c0e1c2872ca74d5e429054b6d99 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 12 Oct 2021 17:34:24 +0300 Subject: [PATCH 47/69] FE: fixed issue with not valid comments, added support for FE on model level --- forward_engineering/config.json | 7 +- forward_engineering/configs/templates.js | 6 +- forward_engineering/ddlProvider.js | 26 +++-- .../helpers/columnDefinitionHelper.js | 12 +- forward_engineering/helpers/general.js | 110 +++++++++++++++++- forward_engineering/helpers/udtHelper.js | 11 +- localization/en.json | 5 +- 7 files changed, 159 insertions(+), 18 deletions(-) diff --git a/forward_engineering/config.json b/forward_engineering/config.json index 760580f..357ecf8 100644 --- a/forward_engineering/config.json +++ b/forward_engineering/config.json @@ -7,5 +7,10 @@ "entity": true, "view": true }, - "applyScriptToInstance": true + "applyScriptToInstance": true, + "combinedContainers": true, + "feLevelSelector": { + "container": true, + "model": true + } } \ No newline at end of file diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index a57d230..e7bbc4c 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -1,7 
+1,7 @@ module.exports = { createSchema: 'CREATE SCHEMA${ifNotExist} ${name};\nSET search_path TO ${name};\n${comment}\n', - comment: "COMMENT ON ${object} ${objectName} IS '${comment}';\n", + comment: 'COMMENT ON ${object} ${objectName} IS ${comment};\n', createTable: '\nCREATE${temporary} TABLE${ifNotExist} ${name} (\n' + @@ -37,7 +37,9 @@ module.exports = { 'AS $BODY$\n${definition}\n$BODY$;\n', createProcedure: - 'CREATE${orReplace} PROCEDURE ${name} (${parameters})\n' + '\tLANGUAGE ${language}\n' + 'AS $BODY$\n${body}\n$BODY$;\n', + 'CREATE${orReplace} PROCEDURE ${name} (${parameters})\n' + + '\tLANGUAGE ${language}\n' + + 'AS $BODY$\n${body}\n$BODY$;\n', createCompositeType: 'CREATE TYPE ${name} AS (\n\t${columnDefinitions}\n);\n${comment}\n', createEnumType: 'CREATE TYPE ${name} AS ENUM (${values});\n${comment}\n', diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 5a09300..021d12e 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -14,12 +14,18 @@ module.exports = (baseProvider, options, app) => { } = app.utils.general; const assignTemplates = app.utils.assignTemplates; const _ = app.require('lodash'); - const { getFunctionArguments, wrapInQuotes, getNamePrefixedWithSchemaName, getColumnsList, getViewData } = - require('./helpers/general')({ - _, - divideIntoActivatedAndDeactivated, - commentIfDeactivated, - }); + const { + getFunctionArguments, + wrapInQuotes, + getNamePrefixedWithSchemaName, + getColumnsList, + getViewData, + wrapComment, + } = require('./helpers/general')({ + _, + divideIntoActivatedAndDeactivated, + commentIfDeactivated, + }); const { generateConstraintsString, foreignKeysToString, foreignActiveKeysToString, createKeyConstraint } = require('./helpers/constraintsHelper')({ _, @@ -59,6 +65,7 @@ module.exports = (baseProvider, options, app) => { assignTemplates, templates, getNamePrefixedWithSchemaName, + wrapComment, }); const { getIndexKeys, getIndexOptions 
} = require('./helpers/indexHelper')({ @@ -75,6 +82,7 @@ module.exports = (baseProvider, options, app) => { templates, commentIfDeactivated, getNamePrefixedWithSchemaName, + wrapComment, }); return { @@ -82,7 +90,7 @@ module.exports = (baseProvider, options, app) => { const comment = assignTemplates(templates.comment, { object: 'SCHEMA', objectName: wrapInQuotes(databaseName), - comment: comments, + comment: wrapComment(comments), }); const schemaStatement = assignTemplates(templates.createSchema, { @@ -123,7 +131,7 @@ module.exports = (baseProvider, options, app) => { const comment = assignTemplates(templates.comment, { object: 'TABLE', objectName: tableName, - comment: description, + comment: wrapComment(description), }); const dividedKeysConstraints = divideIntoActivatedAndDeactivated( @@ -302,7 +310,7 @@ module.exports = (baseProvider, options, app) => { const comment = assignTemplates(templates.comment, { object: 'VIEW', objectName: viewName, - comment: viewData.comment, + comment: wrapComment(viewData.comment), }); const allDeactivated = checkAllKeysDeactivated(viewData.keys || []); diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index 5337ef9..3fc4e90 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -1,4 +1,12 @@ -module.exports = ({ _, wrap, assignTemplates, templates, commentIfDeactivated, getNamePrefixedWithSchemaName }) => { +module.exports = ({ + _, + wrap, + assignTemplates, + templates, + commentIfDeactivated, + getNamePrefixedWithSchemaName, + wrapComment, +}) => { const addLength = (type, length) => { return `${type}(${length})`; }; @@ -70,7 +78,7 @@ module.exports = ({ _, wrap, assignTemplates, templates, commentIfDeactivated, g const comment = assignTemplates(templates.comment, { object: 'COLUMN', objectName: getNamePrefixedWithSchemaName(columnData.name, tableName), - comment: 
columnData.comment, + comment: wrapComment(columnData.comment), }); return commentIfDeactivated(comment, columnData); diff --git a/forward_engineering/helpers/general.js b/forward_engineering/helpers/general.js index 5243353..438fd78 100644 --- a/forward_engineering/helpers/general.js +++ b/forward_engineering/helpers/general.js @@ -1,3 +1,101 @@ +const POSTGRES_RESERVED_WORDS = [ + 'ALL', + 'ANALYSE', + 'ANALYZE', + 'AND', + 'ANY', + 'ARRAY', + 'ASC', + 'ASYMMETRIC', + 'AUTHORIZATION', + 'BINARY', + 'BOTH', + 'CASE', + 'CAST', + 'CHECK', + 'COLLATE', + 'COLUMN', + 'CONCURRENTLY', + 'CONSTRAINT', + 'CREATE', + 'CROSS', + 'CURRENT_CATALOG', + 'CURRENT_DATE', + 'CURRENT_ROLE', + 'CURRENT_SCHEMA', + 'CURRENT_TIME', + 'CURRENT_TIMESTAMP', + 'CURRENT_USER', + 'DEFAULT', + 'DEFERRABLE', + 'DESC', + 'DISTINCT', + 'DO', + 'ELSE', + 'END', + 'EXCEPT', + 'FALSE', + 'FOR', + 'FOREIGN', + 'FREEZE', + 'FROM', + 'FULL', + 'GRANT', + 'GROUP', + 'HAVING', + 'ILIKE', + 'IN', + 'INITIALLY', + 'INTERSECT', + 'INTO', + 'IS', + 'ISNULL', + 'JOIN', + 'LATERAL', + 'LEADING', + 'LEFT', + 'LIKE', + 'LIMIT', + 'LOCALTIME', + 'LOCALTIMESTAMP', + 'NATURAL', + 'NOT', + 'NULL', + 'OFFSET', + 'ON', + 'ONLY', + 'OR', + 'ORDER', + 'OUTER', + 'OVERLAPS', + 'PLACING', + 'PRIMARY', + 'REFERENCES', + 'RETURNING', + 'RIGHT', + 'SELECT', + 'SESSION_USER', + 'SIMILAR', + 'SOME', + 'SYMMETRIC', + 'TABLE', + 'TABLESAMPLE', + 'THEN', + 'TO', + 'TRAILING', + 'TRUE', + 'UNION', + 'UNIQUE', + 'USER', + 'USING', + 'VARIADIC', + 'VERBOSE', + 'WHEN', + 'WHERE', + 'WINDOW', + 'WITH', +]; + module.exports = ({ _, divideIntoActivatedAndDeactivated, commentIfDeactivated }) => { const getFunctionArguments = functionArguments => { return _.map(functionArguments, arg => { @@ -15,7 +113,8 @@ module.exports = ({ _, divideIntoActivatedAndDeactivated, commentIfDeactivated } return wrapInQuotes(name); }; - const wrapInQuotes = name => (/\s/.test(name) ? 
`"${name}"` : name); + const wrapInQuotes = name => + /\s/.test(name) || _.includes(POSTGRES_RESERVED_WORDS, _.toUpper(name)) ? `"${name}"` : name; const columnMapToString = ({ name }) => wrapInQuotes(name); @@ -78,11 +177,20 @@ module.exports = ({ _, divideIntoActivatedAndDeactivated, commentIfDeactivated } ); }; + const wrapComment = comment => { + if (_.includes(comment, "'")) { + return `'${comment.replace("'", "''")}'`; + } + + return `'${comment}'`; + }; + return { getFunctionArguments, getNamePrefixedWithSchemaName, wrapInQuotes, getColumnsList, getViewData, + wrapComment, }; }; diff --git a/forward_engineering/helpers/udtHelper.js b/forward_engineering/helpers/udtHelper.js index 0a5102a..dfa79bf 100644 --- a/forward_engineering/helpers/udtHelper.js +++ b/forward_engineering/helpers/udtHelper.js @@ -1,10 +1,17 @@ -module.exports = ({ _, commentIfDeactivated, assignTemplates, templates, getNamePrefixedWithSchemaName }) => { +module.exports = ({ + _, + commentIfDeactivated, + assignTemplates, + templates, + getNamePrefixedWithSchemaName, + wrapComment, +}) => { const getPlainUdt = (udt, columns) => { const udtName = getNamePrefixedWithSchemaName(udt.name, udt.databaseName); const comment = assignTemplates(templates.comment, { object: 'TYPE', objectName: udtName, - comment: udt.comment, + comment: wrapComment(udt.comment), }); switch (udt.type) { diff --git a/localization/en.json b/localization/en.json index 4d4e388..9212313 100644 --- a/localization/en.json +++ b/localization/en.json @@ -141,5 +141,8 @@ "CONTEXT_MENU___GO_TO_DEFINITION": "Go to User-Defined Type", "DOCUMENTATION___DB_DEFINITIONS": "User-Defined Types", "CONTEXT_MENU___CONVERT_TO_PATTERN_FIELD": "Convert to Pattern Column", - "CONTEXT_MENU___CONVERT_PATTERN_TO_REGULAR_FIELD": "Convert to Regular Column" + "CONTEXT_MENU___CONVERT_PATTERN_TO_REGULAR_FIELD": "Convert to Regular Column", + "FE_LEVEL_SELECTOR_TITLE": "Schemas", + "FE_LEVEL_SELECTOR_CONTAINER": "Individual", + 
"FE_LEVEL_SELECTOR_MODEL": "Combined" } \ No newline at end of file From 1631be64822683900af7b9a477bfca4dafd77f4b Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 13 Oct 2021 11:34:12 +0300 Subject: [PATCH 48/69] Indexes: fixed columns in indexes --- forward_engineering/ddlProvider.js | 4 +++- properties_pane/entity_level/entityLevelConfig.json | 2 +- reverse_engineering/helpers/postgresHelpers/tableHelper.js | 7 ++++--- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 021d12e..5ffd2ad 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -203,7 +203,9 @@ module.exports = (baseProvider, options, app) => { const using = index.index_method ? ` USING ${_.toUpper(index.index_method)}` : ''; const keys = getIndexKeys( - index.index_method === 'btree' ? index.btree_columns : index.columns, + index.index_method === 'btree' + ? index.columns + : _.map(index.columns, column => _.omit(column, 'sortOrder', 'nullsOrder')), isParentActivated ); const options = getIndexOptions(index, isParentActivated); diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 71aa440..b2465bc 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -844,7 +844,7 @@ making sure that you maintain a proper JSON format. 
}, { "propertyName": "Columns", - "propertyKeyword": "btree_columns", + "propertyKeyword": "columns", "propertyType": "fieldList", "template": "orderedList", "propertyTooltip": "The name of a column of the table.", diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 264e69d..4537109 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -215,9 +215,10 @@ const prepareTableIndexes = tableIndexesResult => { unique: indexData.index_unique ?? false, index_tablespace_name: indexData.tablespace_name, index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters), - ...(indexData.index_method === 'btree' - ? { btree_columns: columns } - : { columns: _.map(columns, column => _.omit(column, 'sortOrder', 'nullsOrder')) }), + columns: + indexData.index_method === 'btree' + ? columns + : _.map(columns, column => _.omit(column, 'sortOrder', 'nullsOrder')), }; return clearEmptyPropertiesInObject(index); From caa40ffe4eabeb9df9d7c67dc983fc425e5b738c Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 13 Oct 2021 14:27:25 +0300 Subject: [PATCH 49/69] Fixed re and fe of storage options --- forward_engineering/helpers/tableHelper.js | 3 +- .../entity_level/entityLevelConfig.json | 8 +- .../helpers/postgresHelpers/tableHelper.js | 78 ++++++++++++++----- .../helpers/postgresService.js | 3 +- reverse_engineering/helpers/queryConstants.js | 8 ++ 5 files changed, 74 insertions(+), 26 deletions(-) diff --git a/forward_engineering/helpers/tableHelper.js b/forward_engineering/helpers/tableHelper.js index 39bd99f..4176b2d 100644 --- a/forward_engineering/helpers/tableHelper.js +++ b/forward_engineering/helpers/tableHelper.js @@ -18,9 +18,9 @@ module.exports = ({ _, getColumnsList, checkAllKeysDeactivated }) => { { key: 'inherits', getValue: getBasicValue('INHERITS') }, { key: 'partitioning', 
getValue: getPartitioning }, { key: 'usingMethod', getValue: getBasicValue('USING') }, + { key: 'storage_parameter', getValue: getStorageParameters }, { key: 'on_commit', getValue: getOnCommit }, { key: 'table_tablespace_name', getValue: getBasicValue('TABLESPACE') }, - { key: 'storage_parameter', getValue: getStorageParameters }, { key: 'selectStatement', getValue: getBasicValue('AS') }, ] .map(config => wrap(config.getValue(tableData[config.key], tableData))) @@ -61,7 +61,6 @@ module.exports = ({ _, getColumnsList, checkAllKeysDeactivated }) => { const toastKeys = [ 'toast_autovacuum_enabled', - 'toast_tuple_target', 'toast_vacuum_index_cleanup', 'toast_vacuum_truncate', 'toast_autovacuum_vacuum_threshold', diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index b2465bc..ec81e79 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -675,7 +675,9 @@ making sure that you maintain a proper JSON format. "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", - "template": "textarea" + "template": "textarea", + "markdown": false, + "addTimestampButton": false }, { "propertyName": "Index tablespace", @@ -722,7 +724,9 @@ making sure that you maintain a proper JSON format. 
"propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", - "template": "textarea" + "template": "textarea", + "markdown": false, + "addTimestampButton": false }, { "propertyName": "Index tablespace", diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 4537109..750b484 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -6,12 +6,13 @@ const setDependencies = app => { _ = app.require('lodash'); }; -const prepareStorageParameters = reloptions => { - if (!reloptions) { +const prepareStorageParameters = (reloptions, tableToastOptions) => { + if (!reloptions && !tableToastOptions) { return null; } - const options = _.fromPairs(_.map(reloptions, splitByEqualitySymbol)); + const options = prepareOptions(reloptions); + const toastOptions = prepareOptions(tableToastOptions?.toast_options); const fillfactor = options.fillfactor; const parallel_workers = options.parallel_workers; @@ -36,24 +37,24 @@ const prepareStorageParameters = reloptions => { log_autovacuum_min_duration: options.log_autovacuum_min_duration, }; const user_catalog_table = options.user_catalog_table; - const toast_autovacuum_enabled = options['toast.autovacuum_enabled']; + const toast_autovacuum_enabled = toastOptions.autovacuum_enabled; const toast = { toast_tuple_target: options.toast_tuple_target, - toast_vacuum_index_cleanup: options['toast.vacuum_index_cleanup'], - toast_vacuum_truncate: options['toast.vacuum_truncate'], - toast_autovacuum_vacuum_threshold: options['toast.autovacuum_vacuum_threshold'], - toast_autovacuum_vacuum_scale_factor: options['toast.autovacuum_vacuum_scale_factor'], - toast_autovacuum_vacuum_insert_threshold: options['toast.autovacuum_vacuum_insert_threshold'], - toast_autovacuum_vacuum_insert_scale_factor: 
options['toast.autovacuum_vacuum_insert_scale_factor'], - toast_autovacuum_vacuum_cost_delay: options['toast.autovacuum_vacuum_cost_delay'], - toast_autovacuum_vacuum_cost_limit: options['toast.autovacuum_vacuum_cost_limit'], - toast_autovacuum_freeze_min_age: options['toast.autovacuum_freeze_min_age'], - toast_autovacuum_freeze_max_age: options['toast.autovacuum_freeze_max_age'], - toast_autovacuum_freeze_table_age: options['toast.autovacuum_freeze_table_age'], - toast_autovacuum_multixact_freeze_min_age: options['toast.autovacuum_multixact_freeze_min_age'], - toast_autovacuum_multixact_freeze_max_age: options['toast.autovacuum_multixact_freeze_max_age'], - toast_autovacuum_multixact_freeze_table_age: options['toast.autovacuum_multixact_freeze_table_age'], - toast_log_autovacuum_min_duration: options['toast.log_autovacuum_min_duration'], + toast_vacuum_index_cleanup: toastOptions.vacuum_index_cleanup, + toast_vacuum_truncate: toastOptions.vacuum_truncate, + toast_autovacuum_vacuum_threshold: toastOptions.autovacuum_vacuum_threshold, + toast_autovacuum_vacuum_scale_factor: toastOptions.autovacuum_vacuum_scale_factor, + toast_autovacuum_vacuum_insert_threshold: toastOptions.autovacuum_vacuum_insert_threshold, + toast_autovacuum_vacuum_insert_scale_factor: toastOptions.autovacuum_vacuum_insert_scale_factor, + toast_autovacuum_vacuum_cost_delay: toastOptions.autovacuum_vacuum_cost_delay, + toast_autovacuum_vacuum_cost_limit: toastOptions.autovacuum_vacuum_cost_limit, + toast_autovacuum_freeze_min_age: toastOptions.autovacuum_freeze_min_age, + toast_autovacuum_freeze_max_age: toastOptions.autovacuum_freeze_max_age, + toast_autovacuum_freeze_table_age: toastOptions.autovacuum_freeze_table_age, + toast_autovacuum_multixact_freeze_min_age: toastOptions.autovacuum_multixact_freeze_min_age, + toast_autovacuum_multixact_freeze_max_age: toastOptions.autovacuum_multixact_freeze_max_age, + toast_autovacuum_multixact_freeze_table_age: 
toastOptions.autovacuum_multixact_freeze_table_age, + toast_log_autovacuum_min_duration: toastOptions.log_autovacuum_min_duration, }; const storage_parameter = { @@ -277,10 +278,10 @@ const getIndexStorageParameters = storageParameters => { return clearEmptyPropertiesInObject(data); }; -const prepareTableLevelData = tableLevelData => { +const prepareTableLevelData = (tableLevelData, tableToastOptions) => { const temporary = tableLevelData?.relpersistence === 't'; const unlogged = tableLevelData?.relpersistence === 'u'; - const storage_parameter = prepareStorageParameters(tableLevelData?.reloptions); + const storage_parameter = prepareStorageParameters(tableLevelData?.reloptions, tableToastOptions); const table_tablespace_name = tableLevelData?.spcname; return { @@ -291,6 +292,41 @@ const prepareTableLevelData = tableLevelData => { }; }; +const convertValueToType = value => { + switch (getTypeOfValue(value)) { + case 'number': + case 'boolean': + return JSON.parse(value); + case 'string': + default: + return value; + } +}; + +const getTypeOfValue = value => { + try { + const type = typeof JSON.parse(value); + + if (type === 'object') { + return 'string'; + } + + return type; + } catch (error) { + return 'string'; + } +}; + +const prepareOptions = options => { + return ( + _.chain(options) + .map(splitByEqualitySymbol) + .map(([key, value]) => [key, convertValueToType(value)]) + .fromPairs() + .value() || {} + ); +}; + module.exports = { prepareStorageParameters, prepareTablePartition, diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 24a4b91..f0b2c1a 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -212,6 +212,7 @@ module.exports = { ); const tableOid = tableLevelData?.oid; + const tableToastOptions = await db.queryTolerant(queryConstants.GET_TABLE_TOAST_OPTIONS, [tableName, schemaOid], true); const partitionResult = await 
db.queryTolerant(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid], true); const tableColumns = await this._getTableColumns(tableName, schemaName, tableOid); const descriptionResult = await db.queryTolerant(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid], true); @@ -221,7 +222,7 @@ module.exports = { const tableForeignKeys = await db.queryTolerant(queryConstants.GET_TABLE_FOREIGN_KEYS, [tableOid]); const partitioning = prepareTablePartition(partitionResult, tableColumns); - const tableLevelProperties = prepareTableLevelData(tableLevelData); + const tableLevelProperties = prepareTableLevelData(tableLevelData, tableToastOptions); const description = getDescriptionFromResult(descriptionResult); const inherits = inheritsResult?.parent_table_name ? [schemaName, inheritsResult?.parent_table_name] : null; const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableColumns); diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 33f6720..43aee60 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -14,6 +14,14 @@ const queryConstants = { LEFT JOIN pg_catalog.pg_tablespace AS pt ON pc.reltablespace = pt.oid WHERE pc.relname = $1 AND pc.relnamespace = $2;`, + GET_TABLE_TOAST_OPTIONS: ` + SELECT reloptions AS toast_options + FROM pg_catalog.pg_class + WHERE oid = + (SELECT reltoastrelid + FROM pg_catalog.pg_class + WHERE relname=$1 AND relnamespace = $2 + LIMIT 1)`, GET_TABLE_PARTITION_DATA: ` SELECT partstrat as partition_method, partattrs::int2[] as partition_attributes_positions, From 8f1b5f295e26596152603bfc4234334949661cc3 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 13 Oct 2021 14:40:03 +0300 Subject: [PATCH 50/69] storage parameters default values and dependencies --- forward_engineering/helpers/tableHelper.js | 2 - .../entity_level/entityLevelConfig.json | 103 ++++++------------ 2 files changed, 32 insertions(+), 73 
deletions(-) diff --git a/forward_engineering/helpers/tableHelper.js b/forward_engineering/helpers/tableHelper.js index 4176b2d..4edc75e 100644 --- a/forward_engineering/helpers/tableHelper.js +++ b/forward_engineering/helpers/tableHelper.js @@ -86,8 +86,6 @@ module.exports = ({ _, getColumnsList, checkAllKeysDeactivated }) => { const keysToSkip = ['autovacuum', 'toast', 'id']; return _.chain(value) - .thru(value => (value.autovacuum_enabled ? value : _.omit(value, 'autovacuum'))) - .thru(value => (value.toast_autovacuum_enabled ? value : _.omit(value, 'toast'))) .toPairs() .flatMap(([key, value]) => { if (key === 'autovacuum' || key === 'toast') { diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index ec81e79..2cbbd2b 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -254,15 +254,13 @@ making sure that you maintain a proper JSON format. "propertyName": "Vacuum index cleanup", "propertyKeyword": "vacuum_index_cleanup", "propertyType": "checkbox", - "propertyTooltip": "Disabling index cleanup can speed up VACUUM very significantly, but may also lead to severely bloated indexes if table modifications are frequent.", - "defaultValue": true + "propertyTooltip": "Disabling index cleanup can speed up VACUUM very significantly, but may also lead to severely bloated indexes if table modifications are frequent." }, { "propertyName": "Vacuum truncate", "propertyKeyword": "vacuum_truncate", "propertyType": "checkbox", - "propertyTooltip": "If true, VACUUM and autovacuum do the truncation and the disk space for the truncated pages is returned to the operating system.", - "defaultValue": true + "propertyTooltip": "If true, VACUUM and autovacuum do the truncation and the disk space for the truncated pages is returned to the operating system." 
}, { "propertyName": "Vacuum threshold", @@ -271,8 +269,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the minimum number of updated or deleted tuples needed to trigger a VACUUM in any one table.", "minValue": 0, - "step": 1, - "defaultValue": 50 + "step": 1 }, { "propertyName": "Vacuum scale factor", @@ -280,8 +277,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "propertyTooltip": "Specifies a fraction of the table size to add to autovacuum_vacuum_threshold when deciding whether to trigger a VACUUM.", - "minValue": 0, - "defaultValue": 0.2 + "minValue": 0 }, { "propertyName": "Insert threshold", @@ -290,8 +286,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the number of inserted tuples needed to trigger a VACUUM in any one table. ", "minValue": -1, - "step": 1, - "defaultValue": 1000 + "step": 1 }, { "propertyName": "Insert scale factor", @@ -299,8 +294,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "propertyTooltip": "Specifies a fraction of the table size to add to autovacuum_vacuum_insert_threshold when deciding whether to trigger a VACUUM.", - "minValue": 0, - "defaultValue": 0.2 + "minValue": 0 }, { "propertyName": "Analyze threshold", @@ -309,8 +303,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the minimum number of inserted, updated or deleted tuples needed to trigger an ANALYZE in any one table. ", "minValue": 0, - "step": 1, - "defaultValue": 50 + "step": 1 }, { "propertyName": "Analyze scale factor", @@ -318,8 +311,7 @@ making sure that you maintain a proper JSON format. 
"propertyType": "numeric", "valueType": "number", "propertyTooltip": "Specifies a fraction of the table size to add to autovacuum_analyze_threshold when deciding whether to trigger an ANALYZE. ", - "minValue": 0, - "defaultValue": 0.1 + "minValue": 0 }, { "propertyName": "Vacuum cost delay", @@ -327,8 +319,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "propertyTooltip": "Specifies the cost delay value that will be used in automatic VACUUM operations. If -1 is specified, the regular vacuum_cost_delay value will be used. The value is specified as milliseconds.", - "minValue": -1, - "defaultValue": 2 + "minValue": -1 }, { "propertyName": "Vacuum cost limit", @@ -346,8 +337,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the cutoff age (in transactions) that VACUUM should use to decide whether to freeze row versions while scanning a table.", "minValue": 0, - "step": 1, - "defaultValue": 50000000 + "step": 1 }, { "propertyName": "Freeze max age", @@ -356,8 +346,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the maximum age (in transactions) that a table's pg_class.relfrozenxid field can attain before a VACUUM operation is forced to prevent transaction ID wraparound within the table.", "minValue": 0, - "step": 1, - "defaultValue": 200000000 + "step": 1 }, { "propertyName": "Freeze table age", @@ -367,8 +356,7 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Although users can set this value anywhere from zero to two billion, VACUUM will silently limit the effective value to 95% of autovacuum_freeze_max_age.", "minValue": 0, "maxValue": 2000000000, - "step": 1, - "defaultValue": 150000000 + "step": 1 }, { "propertyName": "Multixact freeze min age", @@ -377,8 +365,7 @@ making sure that you maintain a proper JSON format. 
"valueType": "number", "propertyTooltip": "Specifies the cutoff age (in multixacts) that VACUUM should use to decide whether to replace multixact IDs with a newer transaction ID or multixact ID while scanning a table.", "minValue": 0, - "step": 1, - "defaultValue": 5000000 + "step": 1 }, { "propertyName": "Multixact freeze max age", @@ -388,8 +375,7 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Specifies the maximum age (in multixacts) that a table's pg_class.relminmxid field can attain before a VACUUM operation is forced to prevent multixact ID wraparound within the table.", "minValue": 0, "maxValue": 4000000000, - "step": 1, - "defaultValue": 5000000 + "step": 1 }, { "propertyName": "Multixact freeze table age", @@ -398,9 +384,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Although users can set this value anywhere from zero to two billion, VACUUM will silently limit the effective value to 95% of autovacuum_multixact_freeze_max_age, so that a periodic manual VACUUM has a chance to run before an anti-wraparound is launched for the table.", "minValue": 0, - "maxValue": 2000000000, - "step": 1, - "defaultValue": 150000000 + "defaultValue": true }, { "propertyName": "Log min duration", @@ -409,14 +393,9 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "It is taken as milliseconds. Causes each action executed by autovacuum to be logged if it ran for at least the specified amount of time. Setting this to zero logs all autovacuum actions. -1 (the default) disables logging autovacuum actions.", "minValue": -1, - "step": 1, - "defaultValue": -1 + "step": 1 } - ], - "dependency": { - "key": "autovacuum_enabled", - "value": true - } + ] }, { "propertyName": "Enable TOAST autovacuum", @@ -444,15 +423,13 @@ making sure that you maintain a proper JSON format. 
"propertyName": "Vacuum index cleanup", "propertyKeyword": "toast_vacuum_index_cleanup", "propertyType": "checkbox", - "propertyTooltip": "Disabling index cleanup can speed up VACUUM very significantly, but may also lead to severely bloated indexes if table modifications are frequent.", - "defaultValue": true + "propertyTooltip": "Disabling index cleanup can speed up VACUUM very significantly, but may also lead to severely bloated indexes if table modifications are frequent." }, { "propertyName": "Vacuum truncate", "propertyKeyword": "toast_vacuum_truncate", "propertyType": "checkbox", - "propertyTooltip": "If true, VACUUM and autovacuum do the truncation and the disk space for the truncated pages is returned to the operating system.", - "defaultValue": true + "propertyTooltip": "If true, VACUUM and autovacuum do the truncation and the disk space for the truncated pages is returned to the operating system." }, { "propertyName": "Vacuum threshold", @@ -461,8 +438,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the minimum number of updated or deleted tuples needed to trigger a VACUUM in any one table.", "minValue": 0, - "step": 1, - "defaultValue": 50 + "step": 1 }, { "propertyName": "Vacuum scale factor", @@ -470,8 +446,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "propertyTooltip": "Specifies a fraction of the table size to add to autovacuum_vacuum_threshold when deciding whether to trigger a VACUUM.", - "minValue": 0, - "defaultValue": 0.2 + "minValue": 0 }, { "propertyName": "Insert threshold", @@ -480,8 +455,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the number of inserted tuples needed to trigger a VACUUM in any one table. 
", "minValue": -1, - "step": 1, - "defaultValue": 1000 + "step": 1 }, { "propertyName": "Insert scale factor", @@ -489,8 +463,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "propertyTooltip": "Specifies a fraction of the table size to add to autovacuum_vacuum_insert_threshold when deciding whether to trigger a VACUUM.", - "minValue": 0, - "defaultValue": 0.2 + "minValue": 0 }, { "propertyName": "Vacuum cost delay", @@ -498,8 +471,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "propertyTooltip": "Specifies the cost delay value that will be used in automatic VACUUM operations. If -1 is specified, the regular vacuum_cost_delay value will be used. The value is specified as milliseconds.", - "minValue": -1, - "defaultValue": 2 + "minValue": -1 }, { "propertyName": "Vacuum cost limit", @@ -517,8 +489,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the cutoff age (in transactions) that VACUUM should use to decide whether to freeze row versions while scanning a table.", "minValue": 0, - "step": 1, - "defaultValue": 50000000 + "step": 1 }, { "propertyName": "Freeze max age", @@ -527,8 +498,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the maximum age (in transactions) that a table's pg_class.relfrozenxid field can attain before a VACUUM operation is forced to prevent transaction ID wraparound within the table.", "minValue": 0, - "step": 1, - "defaultValue": 200000000 + "step": 1 }, { "propertyName": "Freeze table age", @@ -538,8 +508,7 @@ making sure that you maintain a proper JSON format. 
"propertyTooltip": "Although users can set this value anywhere from zero to two billion, VACUUM will silently limit the effective value to 95% of autovacuum_freeze_max_age.", "minValue": 0, "maxValue": 2000000000, - "step": 1, - "defaultValue": 150000000 + "step": 1 }, { "propertyName": "Multixact freeze min age", @@ -548,8 +517,7 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "Specifies the cutoff age (in multixacts) that VACUUM should use to decide whether to replace multixact IDs with a newer transaction ID or multixact ID while scanning a table.", "minValue": 0, - "step": 1, - "defaultValue": 5000000 + "step": 1 }, { "propertyName": "Multixact freeze max age", @@ -559,8 +527,7 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Specifies the maximum age (in multixacts) that a table's pg_class.relminmxid field can attain before a VACUUM operation is forced to prevent multixact ID wraparound within the table.", "minValue": 0, "maxValue": 4000000000, - "step": 1, - "defaultValue": 5000000 + "step": 1 }, { "propertyName": "Multixact freeze table age", @@ -570,8 +537,7 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Although users can set this value anywhere from zero to two billion, VACUUM will silently limit the effective value to 95% of autovacuum_multixact_freeze_max_age, so that a periodic manual VACUUM has a chance to run before an anti-wraparound is launched for the table.", "minValue": 0, "maxValue": 2000000000, - "step": 1, - "defaultValue": 150000000 + "step": 1 }, { "propertyName": "Log min duration", @@ -580,14 +546,9 @@ making sure that you maintain a proper JSON format. "valueType": "number", "propertyTooltip": "It is taken as milliseconds. Causes each action executed by autovacuum to be logged if it ran for at least the specified amount of time. Setting this to zero logs all autovacuum actions. 
-1 (the default) disables logging autovacuum actions.", "minValue": -1, - "step": 1, - "defaultValue": -1 + "step": 1 } - ], - "dependency": { - "key": "toast_autovacuum_enabled", - "value": true - } + ] }, { "propertyName": "User catalog table", From 5585a98c7f2614e7a7ce6557208b7cdcc9edbe11 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 13 Oct 2021 15:23:58 +0300 Subject: [PATCH 51/69] Fixed partition key. Fixed columns fe of default value --- .../helpers/columnDefinitionHelper.js | 2 +- properties_pane/entity_level/entityLevelConfig.json | 13 ++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index 3fc4e90..e4932d6 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -65,7 +65,7 @@ module.exports = ({ const decorateDefault = (type, defaultValue) => { const constantsValues = ['current_timestamp', 'null']; if ((isString(type) || isDateTime(type)) && !constantsValues.includes(_.toLower(defaultValue))) { - return wrap(escapeQuotes(defaultValue), '"', '"'); + return wrap(escapeQuotes(defaultValue), "'", "'"); } else { return defaultValue; } diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 2cbbd2b..1c341d4 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -176,7 +176,7 @@ making sure that you maintain a proper JSON format. "propertyKeyword": "partitionBy", "propertyTooltip": "", "propertyType": "select", - "defaultValue": "", + "defaultValue": "keys", "options": [ "keys", "expression" @@ -185,8 +185,12 @@ making sure that you maintain a proper JSON format. 
{ "propertyName": "Partition key", "propertyKeyword": "compositePartitionKey", - "propertyType": "primaryKeySetter", - "abbr": "pk,PK", + "propertyType": "compositeKeySetter", + "disabledItemStrategy": "default", + "isCompositeKey": true, + "setPrimaryKey": false, + "template": "collectiontree", + "abbr": "PK", "dependency": { "key": "partitionBy", "value": "keys" @@ -383,8 +387,7 @@ making sure that you maintain a proper JSON format. "propertyType": "numeric", "valueType": "number", "propertyTooltip": "Although users can set this value anywhere from zero to two billion, VACUUM will silently limit the effective value to 95% of autovacuum_multixact_freeze_max_age, so that a periodic manual VACUUM has a chance to run before an anti-wraparound is launched for the table.", - "minValue": 0, - "defaultValue": true + "minValue": 0 }, { "propertyName": "Log min duration", From b1e422354148635903f38a9ebf2fa8b678cf89da Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 13 Oct 2021 16:37:27 +0300 Subject: [PATCH 52/69] FE: reverted old logic of creating functions and udfs --- forward_engineering/ddlProvider.js | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 5ffd2ad..83415cd 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -99,7 +99,10 @@ module.exports = (baseProvider, options, app) => { comment: comments ? 
comment : '', }); - return schemaStatement; + const createFunctionStatement = getFunctionsScript(databaseName, udfs); + const createProceduresStatement = getProceduresScript(databaseName, procedures); + + return _.trim([schemaStatement, createFunctionStatement, createProceduresStatement].join('\n\n')); }, createTable( @@ -381,14 +384,6 @@ module.exports = (baseProvider, options, app) => { return getUserDefinedType(udt, columns); }, - createFunctions({ databaseName, udfs }) { - return getFunctionsScript(databaseName, udfs); - }, - - createProcedures({ databaseName, procedures }) { - return getProceduresScript(databaseName, procedures); - }, - getDefaultType(type) { return defaultTypes[type]; }, From 46cc1b6fca33ebefa3942bd4a1dc8046a1c0db08 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 13 Oct 2021 16:39:10 +0300 Subject: [PATCH 53/69] RE: fixed re of and properties of indexes --- .../helpers/postgresHelpers/tableHelper.js | 11 +++++++++-- reverse_engineering/helpers/queryConstants.js | 6 ++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 750b484..70163c4 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -208,14 +208,21 @@ const getCheckConstraint = constraint => { const prepareTableIndexes = tableIndexesResult => { return _.map(tableIndexesResult, indexData => { - const columns = mapIndexColumns(indexData); + const allColumns = mapIndexColumns(indexData); + const columns = _.slice(allColumns, 0, indexData.number_of_keys); + const include = _.chain(allColumns) + .slice(indexData.number_of_keys) + .map(column => _.pick(column, 'name')) + .value(); const index = { indxName: indexData.indexname, index_method: indexData.index_method, unique: indexData.index_unique ?? 
false, - index_tablespace_name: indexData.tablespace_name, + index_tablespace_name: indexData.tablespace_name || '', index_storage_parameter: getIndexStorageParameters(indexData.storage_parameters), + where: indexData.where_expression || '', + include, columns: indexData.index_method === 'btree' ? columns diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 43aee60..65e0bae 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -67,6 +67,8 @@ const queryConstants = { SELECT indexname, index_method, index_unique, + number_of_keys, + where_expression, array_agg(attname ORDER BY ord)::text[] AS columns, array_agg(coll @@ -90,6 +92,8 @@ const queryConstants = { attribute.attname, c.reloptions, tablespace_t.spcname AS tablespace_name, + indexes.indnkeyatts AS number_of_keys, + pg_catalog.pg_get_expr(indpred, indrelid) AS where_expression, CASE WHEN collation_namespace.nspname is not null THEN format('%I.%I',collation_namespace.nspname,collation_t.collname) END AS coll, @@ -126,6 +130,8 @@ const queryConstants = { index_method, index_unique, reloptions, + number_of_keys, + where_expression, tablespace_name;`, GET_TABLE_FOREIGN_KEYS: ` SELECT pcon.conname AS relationship_name, From 227ff3a44ce1db941aefaff8146cc2e6538e692d Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 13 Oct 2021 16:44:34 +0300 Subject: [PATCH 54/69] Bump Hackolade version --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index acdbe7b..6fc791c 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "versionDate": "2021-09-01", "author": "hackolade", "engines": { - "hackolade": "5.1.3", + "hackolade": "5.2.6", "hackoladePlugin": "1.2.0" }, "contributes": { From a970e07d8935a88608ae8a5c808052f8722d9153 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 15 Oct 2021 11:19:22 +0300 Subject: [PATCH 55/69] Fixed 
issue with multirange types, fixed names in configs --- forward_engineering/ddlProvider.js | 12 ++- .../helpers/columnDefinitionHelper.js | 43 +++++++++ .../entity_level/entityLevelConfig.json | 2 +- reverse_engineering/config.json | 13 ++- .../node_modules/asn1/package.json | 91 +++++------------- .../node_modules/buffer-writer/package.json | 65 ++++--------- .../node_modules/debug/package.json | 90 ++++++------------ .../node_modules/inherits/package.json | 62 ++++--------- .../node_modules/lodash.defaults/package.json | 83 ++++------------- .../node_modules/ms/package.json | 79 +++++----------- .../node_modules/packet-reader/package.json | 61 ++++-------- .../pg-connection-string/package.json | 81 ++++++---------- .../node_modules/pg-int8/package.json | 78 ++++++---------- .../node_modules/pg-pool/package.json | 79 ++++++---------- .../node_modules/pg-protocol/package.json | 69 +++++--------- .../node_modules/pg-types/package.json | 85 +++++++---------- .../node_modules/pg/package.json | 92 +++++++------------ .../node_modules/pgpass/package.json | 67 ++++---------- .../node_modules/postgres-array/package.json | 74 +++++---------- .../node_modules/postgres-bytea/package.json | 72 +++++---------- .../node_modules/postgres-date/package.json | 72 +++++---------- .../postgres-interval/package.json | 76 +++++---------- .../node_modules/readable-stream/package.json | 89 +++++++----------- .../node_modules/safe-buffer/package.json | 67 +++++--------- .../node_modules/safer-buffer/package.json | 61 ++++-------- .../node_modules/semver/package.json | 71 ++++---------- .../node_modules/split2/package.json | 79 ++++++---------- .../node_modules/ssh2-streams/package.json | 83 ++++------------- .../node_modules/ssh2/package.json | 79 +++------------- .../node_modules/streamsearch/package.json | 72 +++------------ .../node_modules/string_decoder/package.json | 64 ++++--------- .../node_modules/tunnel-ssh/package.json | 74 +++++---------- 
.../node_modules/util-deprecate/package.json | 63 ++++--------- .../node_modules/xtend/package.json | 87 ++++++------------ reverse_engineering/package-lock.json | 2 +- reverse_engineering/package.json | 2 +- types/multirange.json | 2 +- 37 files changed, 744 insertions(+), 1597 deletions(-) diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 83415cd..c600c60 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -75,7 +75,7 @@ module.exports = (baseProvider, options, app) => { getColumnsList, }); - const { decorateType, decorateDefault, getColumnComments } = require('./helpers/columnDefinitionHelper')({ + const { decorateType, decorateDefault, getColumnComments, replaceTypeByVersion } = require('./helpers/columnDefinitionHelper')({ _, wrap, assignTemplates, @@ -173,18 +173,19 @@ module.exports = (baseProvider, options, app) => { }, convertColumnDefinition(columnDefinition) { + const type = replaceTypeByVersion(columnDefinition.type, columnDefinition.dbVersion); const notNull = columnDefinition.nullable ? '' : ' NOT NULL'; const primaryKey = columnDefinition.primaryKey ? ' PRIMARY KEY' : ''; const uniqueKey = columnDefinition.unique ? ' UNIQUE' : ''; const collation = columnDefinition.collationRule ? ` COLLATE "${columnDefinition.collationRule}"` : ''; const defaultValue = !_.isUndefined(columnDefinition.default) - ? ' DEFAULT ' + decorateDefault(columnDefinition.type, columnDefinition.default) + ? ' DEFAULT ' + decorateDefault(type, columnDefinition.default) : ''; return commentIfDeactivated( assignTemplates(templates.columnDefinition, { name: wrapInQuotes(columnDefinition.name), - type: decorateType(columnDefinition.type, columnDefinition), + type: decorateType(type, columnDefinition), notNull, primaryKey, uniqueKey, @@ -404,6 +405,7 @@ module.exports = (baseProvider, options, app) => { const timePrecision = _.includes(timeTypes, columnDefinition.type) ? 
jsonSchema.timePrecision : ''; const with_timezone = _.includes(timeTypes, columnDefinition.type) ? jsonSchema.with_timezone : ''; const intervalOptions = columnDefinition.type === 'interval' ? jsonSchema.intervalOptions : ''; + const dbVersion = dbData.dbVersion; return { name: columnDefinition.name, @@ -431,6 +433,7 @@ module.exports = (baseProvider, options, app) => { timePrecision, with_timezone, intervalOptions, + dbVersion, }; }, @@ -451,12 +454,15 @@ module.exports = (baseProvider, options, app) => { }, hydrateDatabase(containerData, data) { + const dbVersion = _.get(data, 'modelData.0.dbVersion'); + return { databaseName: containerData.name, ifNotExist: containerData.ifNotExist, comments: containerData.description, udfs: data?.udfs || [], procedures: data?.procedures || [], + dbVersion }; }, diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index e4932d6..9116103 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -87,9 +87,52 @@ module.exports = ({ .value(); }; + const TYPES_MAPPING_BY_VERSION = { + 'v13.x': { + int4multirange: 'int4range', + int8multirange: 'int8range', + nummultirange: 'numrange', + datemultirange: 'daterange', + tsmultirange: 'tsrange', + tstzmultirange: 'tstzrange', + }, + 'v12.x': { + int4multirange: 'int4range', + int8multirange: 'int8range', + nummultirange: 'numrange', + datemultirange: 'daterange', + tsmultirange: 'tsrange', + tstzmultirange: 'tstzrange', + }, + 'v11.x': { + int4multirange: 'int4range', + int8multirange: 'int8range', + nummultirange: 'numrange', + datemultirange: 'daterange', + tsmultirange: 'tsrange', + tstzmultirange: 'tstzrange', + }, + 'v10.x': { + int4multirange: 'int4range', + int8multirange: 'int8range', + nummultirange: 'numrange', + datemultirange: 'daterange', + tsmultirange: 'tsrange', + tstzmultirange: 'tstzrange', + }, + }; + + const 
replaceTypeByVersion = (type, dbVersion) => { + const dbVersionMap = TYPES_MAPPING_BY_VERSION[dbVersion]; + const replacedType = _.get(dbVersionMap, type); + + return replacedType || type; + }; + return { decorateType, decorateDefault, getColumnComments, + replaceTypeByVersion, }; }; diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index 1c341d4..e60d0db 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -138,7 +138,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "If not exists", "propertyKeyword": "ifNotExist", - "propertyTooltip": "When the IF NOT EXISTS clause is used, MariaDB will return a warning instead of an error if the specified table already exists. Cannot be used together with the OR REPLACE clause.", + "propertyTooltip": "When the IF NOT EXISTS clause is used, PostgreSQL will return a warning instead of an error if the specified table already exists. 
Cannot be used together with the OR REPLACE clause.", "propertyType": "checkbox", "defaultValue": true }, diff --git a/reverse_engineering/config.json b/reverse_engineering/config.json index f9687fc..bfb9eee 100644 --- a/reverse_engineering/config.json +++ b/reverse_engineering/config.json @@ -3,7 +3,14 @@ "NO_DATABASES": "There is no databases in PostgreSQL Server instance", "WRONG_CONNECTION": "Can not connect to PostgreSQL Server instance" }, - "defaultDdlType": "mariadb", - "excludeDocKind": ["id"], - "connectionList": [ "name", "host", "port", "userName" ] + "defaultDdlType": "postgres", + "excludeDocKind": [ + "id" + ], + "connectionList": [ + "name", + "host", + "port", + "userName" + ] } \ No newline at end of file diff --git a/reverse_engineering/node_modules/asn1/package.json b/reverse_engineering/node_modules/asn1/package.json index e04eb7f..30e7afa 100644 --- a/reverse_engineering/node_modules/asn1/package.json +++ b/reverse_engineering/node_modules/asn1/package.json @@ -1,78 +1,35 @@ { - "_args": [ - [ - "asn1@0.2.4", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "asn1@0.2.4", - "_id": "asn1@0.2.4", - "_inBundle": false, - "_integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", - "_location": "/asn1", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "asn1@0.2.4", - "name": "asn1", - "escapedName": "asn1", - "rawSpec": "0.2.4", - "saveSpec": null, - "fetchSpec": "0.2.4" - }, - "_requiredBy": [ - "/ssh2-streams" - ], - "_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "_spec": "0.2.4", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "author": { - "name": "Joyent", - "url": "joyent.com" - }, - "bugs": { - "url": "https://github.com/joyent/node-asn1/issues" - }, + "author": "Joyent (joyent.com)", "contributors": [ - { - "name": "Mark Cavage", - "email": 
"mcavage@gmail.com" - }, - { - "name": "David Gwynne", - "email": "loki@animata.net" - }, - { - "name": "Yunong Xiao", - "email": "yunong@joyent.com" - }, - { - "name": "Alex Wilson", - "email": "alex.wilson@joyent.com" - } + "Mark Cavage ", + "David Gwynne ", + "Yunong Xiao ", + "Alex Wilson " ], + "name": "asn1", + "description": "Contains parsers and serializers for ASN.1 (currently BER only)", + "version": "0.2.4", + "repository": { + "type": "git", + "url": "git://github.com/joyent/node-asn1.git" + }, + "main": "lib/index.js", "dependencies": { "safer-buffer": "~2.1.0" }, - "description": "Contains parsers and serializers for ASN.1 (currently BER only)", "devDependencies": { - "eslint": "2.13.1", - "eslint-plugin-joyent": "~1.3.0", - "faucet": "0.0.1", "istanbul": "^0.3.6", - "tape": "^3.5.0" - }, - "homepage": "https://github.com/joyent/node-asn1#readme", - "license": "MIT", - "main": "lib/index.js", - "name": "asn1", - "repository": { - "type": "git", - "url": "git://github.com/joyent/node-asn1.git" + "faucet": "0.0.1", + "tape": "^3.5.0", + "eslint": "2.13.1", + "eslint-plugin-joyent": "~1.3.0" }, "scripts": { - "test": "tape ./test/ber/*.test.js" + "test": "./node_modules/.bin/tape ./test/ber/*.test.js" }, - "version": "0.2.4" -} + "license": "MIT" + +,"_resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz" +,"_integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==" +,"_from": "asn1@0.2.4" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/buffer-writer/package.json b/reverse_engineering/node_modules/buffer-writer/package.json index dc11b4b..04bd465 100644 --- a/reverse_engineering/node_modules/buffer-writer/package.json +++ b/reverse_engineering/node_modules/buffer-writer/package.json @@ -1,57 +1,30 @@ { - "_from": "buffer-writer@2.0.0", - "_id": "buffer-writer@2.0.0", - "_inBundle": false, - "_integrity": 
"sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", - "_location": "/buffer-writer", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "buffer-writer@2.0.0", - "name": "buffer-writer", - "escapedName": "buffer-writer", - "rawSpec": "2.0.0", - "saveSpec": null, - "fetchSpec": "2.0.0" - }, - "_requiredBy": [ - "/pg" - ], - "_resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", - "_shasum": "ce7eb81a38f7829db09c873f2fbb792c0c98ec04", - "_spec": "buffer-writer@2.0.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", - "author": { - "name": "Brian M. Carlson" - }, - "bugs": { - "url": "https://github.com/brianc/node-buffer-writer/issues" - }, - "bundleDependencies": false, - "deprecated": false, + "name": "buffer-writer", + "version": "2.0.0", "description": "a fast, efficient buffer writer", - "devDependencies": { - "mocha": "5.2.0" + "main": "index.js", + "scripts": { + "test": "mocha --throw-deprecation" }, - "engines": { - "node": ">=4" + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-buffer-writer.git" }, - "homepage": "https://github.com/brianc/node-buffer-writer#readme", "keywords": [ "buffer", "writer", "builder" ], + "author": "Brian M. 
Carlson", "license": "MIT", - "main": "index.js", - "name": "buffer-writer", - "repository": { - "type": "git", - "url": "git://github.com/brianc/node-buffer-writer.git" - }, - "scripts": { - "test": "mocha --throw-deprecation" + "devDependencies": { + "mocha": "5.2.0" }, - "version": "2.0.0" -} + "engines": { + "node": ">=4" + } + +,"_resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz" +,"_integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" +,"_from": "buffer-writer@2.0.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/debug/package.json b/reverse_engineering/node_modules/debug/package.json index d207019..28acbb0 100644 --- a/reverse_engineering/node_modules/debug/package.json +++ b/reverse_engineering/node_modules/debug/package.json @@ -1,61 +1,25 @@ { - "_args": [ - [ - "debug@2.6.9", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "debug@2.6.9", - "_id": "debug@2.6.9", - "_inBundle": false, - "_integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "_location": "/debug", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "debug@2.6.9", - "name": "debug", - "escapedName": "debug", - "rawSpec": "2.6.9", - "saveSpec": null, - "fetchSpec": "2.6.9" + "name": "debug", + "version": "2.6.9", + "repository": { + "type": "git", + "url": "git://github.com/visionmedia/debug.git" }, - "_requiredBy": [ - "/tunnel-ssh" + "description": "small debugging utility", + "keywords": [ + "debug", + "log", + "debugger" ], - "_resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "_spec": "2.6.9", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "author": { - "name": "TJ Holowaychuk", - "email": "tj@vision-media.ca" - }, - "browser": "./src/browser.js", - "bugs": { - "url": 
"https://github.com/visionmedia/debug/issues" - }, - "component": { - "scripts": { - "debug/index.js": "browser.js", - "debug/debug.js": "debug.js" - } - }, + "author": "TJ Holowaychuk ", "contributors": [ - { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://n8.io" - }, - { - "name": "Andrew Rhyne", - "email": "rhyneandrew@gmail.com" - } + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " ], + "license": "MIT", "dependencies": { "ms": "2.0.0" }, - "description": "small debugging utility", "devDependencies": { "browserify": "9.0.3", "chai": "^3.5.0", @@ -74,18 +38,16 @@ "sinon": "^1.17.6", "sinon-chai": "^2.8.0" }, - "homepage": "https://github.com/visionmedia/debug#readme", - "keywords": [ - "debug", - "log", - "debugger" - ], - "license": "MIT", "main": "./src/index.js", - "name": "debug", - "repository": { - "type": "git", - "url": "git://github.com/visionmedia/debug.git" - }, - "version": "2.6.9" -} + "browser": "./src/browser.js", + "component": { + "scripts": { + "debug/index.js": "browser.js", + "debug/debug.js": "debug.js" + } + } + +,"_resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" +,"_integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==" +,"_from": "debug@2.6.9" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/inherits/package.json b/reverse_engineering/node_modules/inherits/package.json index cdc11a1..7e156d1 100644 --- a/reverse_engineering/node_modules/inherits/package.json +++ b/reverse_engineering/node_modules/inherits/package.json @@ -1,42 +1,7 @@ { - "_from": "inherits@^2.0.3", - "_id": "inherits@2.0.4", - "_inBundle": false, - "_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "_location": "/inherits", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "inherits@^2.0.3", - "name": "inherits", - "escapedName": 
"inherits", - "rawSpec": "^2.0.3", - "saveSpec": null, - "fetchSpec": "^2.0.3" - }, - "_requiredBy": [ - "/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "_shasum": "0fa2c64f932917c3433a0ded55363aae37416b7c", - "_spec": "inherits@^2.0.3", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/readable-stream", - "browser": "./inherits_browser.js", - "bugs": { - "url": "https://github.com/isaacs/inherits/issues" - }, - "bundleDependencies": false, - "deprecated": false, + "name": "inherits", "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", - "devDependencies": { - "tap": "^14.2.4" - }, - "files": [ - "inherits.js", - "inherits_browser.js" - ], - "homepage": "https://github.com/isaacs/inherits#readme", + "version": "2.0.4", "keywords": [ "inheritance", "class", @@ -47,15 +12,22 @@ "browser", "browserify" ], - "license": "ISC", "main": "./inherits.js", - "name": "inherits", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/inherits.git" - }, + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", "scripts": { "test": "tap" }, - "version": "2.0.4" -} + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ] + +,"_resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" +,"_integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" +,"_from": "inherits@2.0.4" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/lodash.defaults/package.json b/reverse_engineering/node_modules/lodash.defaults/package.json index 0df212e..e7efea3 100644 --- a/reverse_engineering/node_modules/lodash.defaults/package.json +++ b/reverse_engineering/node_modules/lodash.defaults/package.json @@ -1,72 +1,21 @@ { - "_args": [ - [ - "lodash.defaults@4.2.0", - 
"/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "lodash.defaults@4.2.0", - "_id": "lodash.defaults@4.2.0", - "_inBundle": false, - "_integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=", - "_location": "/lodash.defaults", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "lodash.defaults@4.2.0", - "name": "lodash.defaults", - "escapedName": "lodash.defaults", - "rawSpec": "4.2.0", - "saveSpec": null, - "fetchSpec": "4.2.0" - }, - "_requiredBy": [ - "/tunnel-ssh" - ], - "_resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", - "_spec": "4.2.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "author": { - "name": "John-David Dalton", - "email": "john.david.dalton@gmail.com", - "url": "http://allyoucanleet.com/" - }, - "bugs": { - "url": "https://github.com/lodash/lodash/issues" - }, - "contributors": [ - { - "name": "John-David Dalton", - "email": "john.david.dalton@gmail.com", - "url": "http://allyoucanleet.com/" - }, - { - "name": "Blaine Bublitz", - "email": "blaine.bublitz@gmail.com", - "url": "https://github.com/phated" - }, - { - "name": "Mathias Bynens", - "email": "mathias@qiwi.be", - "url": "https://mathiasbynens.be/" - } - ], + "name": "lodash.defaults", + "version": "4.2.0", "description": "The lodash method `_.defaults` exported as a module.", "homepage": "https://lodash.com/", "icon": "https://lodash.com/icon.svg", - "keywords": [ - "lodash-modularized", - "defaults" - ], "license": "MIT", - "name": "lodash.defaults", - "repository": { - "type": "git", - "url": "git+https://github.com/lodash/lodash.git" - }, - "scripts": { - "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" - }, - "version": "4.2.0" -} + "keywords": "lodash-modularized, defaults", + "author": "John-David Dalton (http://allyoucanleet.com/)", + "contributors": [ + "John-David Dalton (http://allyoucanleet.com/)", + 
"Blaine Bublitz (https://github.com/phated)", + "Mathias Bynens (https://mathiasbynens.be/)" + ], + "repository": "lodash/lodash", + "scripts": { "test": "echo \"See https://travis-ci.org/lodash/lodash-cli for testing details.\"" } + +,"_resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz" +,"_integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" +,"_from": "lodash.defaults@4.2.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/ms/package.json b/reverse_engineering/node_modules/ms/package.json index cead0ad..8983d69 100644 --- a/reverse_engineering/node_modules/ms/package.json +++ b/reverse_engineering/node_modules/ms/package.json @@ -1,42 +1,16 @@ { - "_args": [ - [ - "ms@2.0.0", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "ms@2.0.0", - "_id": "ms@2.0.0", - "_inBundle": false, - "_integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "_location": "/ms", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "ms@2.0.0", - "name": "ms", - "escapedName": "ms", - "rawSpec": "2.0.0", - "saveSpec": null, - "fetchSpec": "2.0.0" - }, - "_requiredBy": [ - "/debug" - ], - "_resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "_spec": "2.0.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "bugs": { - "url": "https://github.com/zeit/ms/issues" - }, + "name": "ms", + "version": "2.0.0", "description": "Tiny milisecond conversion utility", - "devDependencies": { - "eslint": "3.19.0", - "expect.js": "0.3.1", - "husky": "0.13.3", - "lint-staged": "3.4.1", - "mocha": "3.4.1" + "repository": "zeit/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" }, "eslintConfig": { "extends": "eslint:recommended", @@ -45,11 +19,6 @@ "es6": true } }, - "files": [ - "index.js" - ], - "homepage": 
"https://github.com/zeit/ms#readme", - "license": "MIT", "lint-staged": { "*.js": [ "npm run lint", @@ -57,16 +26,16 @@ "git add" ] }, - "main": "./index", - "name": "ms", - "repository": { - "type": "git", - "url": "git+https://github.com/zeit/ms.git" - }, - "scripts": { - "lint": "eslint lib/* bin/*", - "precommit": "lint-staged", - "test": "mocha tests.js" - }, - "version": "2.0.0" -} + "license": "MIT", + "devDependencies": { + "eslint": "3.19.0", + "expect.js": "0.3.1", + "husky": "0.13.3", + "lint-staged": "3.4.1", + "mocha": "3.4.1" + } + +,"_resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" +,"_integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" +,"_from": "ms@2.0.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/packet-reader/package.json b/reverse_engineering/node_modules/packet-reader/package.json index 7349ce6..50d82a4 100644 --- a/reverse_engineering/node_modules/packet-reader/package.json +++ b/reverse_engineering/node_modules/packet-reader/package.json @@ -1,52 +1,29 @@ { - "_from": "packet-reader@1.0.0", - "_id": "packet-reader@1.0.0", - "_inBundle": false, - "_integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==", - "_location": "/packet-reader", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "packet-reader@1.0.0", - "name": "packet-reader", - "escapedName": "packet-reader", - "rawSpec": "1.0.0", - "saveSpec": null, - "fetchSpec": "1.0.0" - }, - "_requiredBy": [ - "/pg" - ], - "_resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", - "_shasum": "9238e5480dedabacfe1fe3f2771063f164157d74", - "_spec": "packet-reader@1.0.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", - "author": { - "name": "Brian M. 
Carlson" - }, - "bugs": { - "url": "https://github.com/brianc/node-packet-reader/issues" - }, - "bundleDependencies": false, - "deprecated": false, + "name": "packet-reader", + "version": "1.0.0", "description": "Read binary packets...", - "devDependencies": { - "mocha": "~1.21.5" - }, + "main": "index.js", "directories": { "test": "test" }, - "homepage": "https://github.com/brianc/node-packet-reader", - "license": "MIT", - "main": "index.js", - "name": "packet-reader", + "scripts": { + "test": "mocha" + }, "repository": { "type": "git", "url": "git://github.com/brianc/node-packet-reader.git" }, - "scripts": { - "test": "mocha" + "author": "Brian M. Carlson", + "license": "MIT", + "bugs": { + "url": "https://github.com/brianc/node-packet-reader/issues" }, - "version": "1.0.0" -} + "homepage": "https://github.com/brianc/node-packet-reader", + "devDependencies": { + "mocha": "~1.21.5" + } + +,"_resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz" +,"_integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" +,"_from": "packet-reader@1.0.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-connection-string/package.json b/reverse_engineering/node_modules/pg-connection-string/package.json index 03b64fd..7d2e876 100644 --- a/reverse_engineering/node_modules/pg-connection-string/package.json +++ b/reverse_engineering/node_modules/pg-connection-string/package.json @@ -1,38 +1,31 @@ { - "_from": "pg-connection-string@^2.5.0", - "_id": "pg-connection-string@2.5.0", - "_inBundle": false, - "_integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==", - "_location": "/pg-connection-string", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pg-connection-string@^2.5.0", - "name": "pg-connection-string", - "escapedName": "pg-connection-string", - "rawSpec": "^2.5.0", - 
"saveSpec": null, - "fetchSpec": "^2.5.0" + "name": "pg-connection-string", + "version": "2.5.0", + "description": "Functions for dealing with a PostgresSQL connection string", + "main": "./index.js", + "types": "./index.d.ts", + "scripts": { + "test": "istanbul cover _mocha && npm run check-coverage", + "check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100", + "coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls" }, - "_requiredBy": [ - "/pg" - ], - "_resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", - "_shasum": "538cadd0f7e603fc09a12590f3b8a452c2c0cf34", - "_spec": "pg-connection-string@^2.5.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", - "author": { - "name": "Blaine Bublitz", - "email": "blaine@iceddev.com", - "url": "http://iceddev.com/" + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-connection-string" }, + "keywords": [ + "pg", + "connection", + "string", + "parse" + ], + "author": "Blaine Bublitz (http://iceddev.com/)", + "license": "MIT", "bugs": { "url": "https://github.com/brianc/node-postgres/issues" }, - "bundleDependencies": false, - "deprecated": false, - "description": "Functions for dealing with a PostgresSQL connection string", + "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string", "devDependencies": { "chai": "^4.1.1", "coveralls": "^3.0.4", @@ -43,27 +36,9 @@ "index.js", "index.d.ts" ], - "gitHead": "d45947938263bec30a1e3252452f04177b785f66", - "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string", - "keywords": [ - "pg", - "connection", - "string", - "parse" - ], - "license": "MIT", - "main": "./index.js", - "name": "pg-connection-string", - "repository": { - "type": "git", - "url": "git://github.com/brianc/node-postgres.git", - 
"directory": "packages/pg-connection-string" - }, - "scripts": { - "check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100", - "coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls", - "test": "istanbul cover _mocha && npm run check-coverage" - }, - "types": "./index.d.ts", - "version": "2.5.0" -} + "gitHead": "d45947938263bec30a1e3252452f04177b785f66" + +,"_resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz" +,"_integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" +,"_from": "pg-connection-string@2.5.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-int8/package.json b/reverse_engineering/node_modules/pg-int8/package.json index 2964a2b..357bafb 100644 --- a/reverse_engineering/node_modules/pg-int8/package.json +++ b/reverse_engineering/node_modules/pg-int8/package.json @@ -1,52 +1,28 @@ { - "_from": "pg-int8@1.0.1", - "_id": "pg-int8@1.0.1", - "_inBundle": false, - "_integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", - "_location": "/pg-int8", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "pg-int8@1.0.1", - "name": "pg-int8", - "escapedName": "pg-int8", - "rawSpec": "1.0.1", - "saveSpec": null, - "fetchSpec": "1.0.1" - }, - "_requiredBy": [ - "/pg-types" - ], - "_resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", - "_shasum": "943bd463bf5b71b4170115f80f8efc9a0c0eb78c", - "_spec": "pg-int8@1.0.1", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", - "bugs": { - "url": "https://github.com/charmander/pg-int8/issues" - }, - "bundleDependencies": false, - "deprecated": false, - "description": "64-bit big-endian signed integer-to-string conversion", - "devDependencies": { - 
"@charmander/eslint-config-base": "1.0.2", - "tap": "10.7.3" - }, - "engines": { - "node": ">=4.0.0" - }, - "files": [ - "index.js" - ], - "homepage": "https://github.com/charmander/pg-int8#readme", - "license": "ISC", - "name": "pg-int8", - "repository": { - "type": "git", - "url": "git+https://github.com/charmander/pg-int8.git" - }, - "scripts": { - "test": "tap test" - }, - "version": "1.0.1" -} + "name": "pg-int8", + "version": "1.0.1", + "description": "64-bit big-endian signed integer-to-string conversion", + "bugs": "https://github.com/charmander/pg-int8/issues", + "license": "ISC", + "files": [ + "index.js" + ], + "repository": { + "type": "git", + "url": "https://github.com/charmander/pg-int8" + }, + "scripts": { + "test": "tap test" + }, + "devDependencies": { + "@charmander/eslint-config-base": "1.0.2", + "tap": "10.7.3" + }, + "engines": { + "node": ">=4.0.0" + } + +,"_resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz" +,"_integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" +,"_from": "pg-int8@1.0.1" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-pool/package.json b/reverse_engineering/node_modules/pg-pool/package.json index b8b9bb9..fb6a7d5 100644 --- a/reverse_engineering/node_modules/pg-pool/package.json +++ b/reverse_engineering/node_modules/pg-pool/package.json @@ -1,36 +1,31 @@ { - "_from": "pg-pool@^3.4.1", - "_id": "pg-pool@3.4.1", - "_inBundle": false, - "_integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==", - "_location": "/pg-pool", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pg-pool@^3.4.1", - "name": "pg-pool", - "escapedName": "pg-pool", - "rawSpec": "^3.4.1", - "saveSpec": null, - "fetchSpec": "^3.4.1" + "name": "pg-pool", + "version": "3.4.1", + "description": "Connection pool for node-postgres", + "main": "index.js", + 
"directories": { + "test": "test" }, - "_requiredBy": [ - "/pg" - ], - "_resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz", - "_shasum": "0e71ce2c67b442a5e862a9c182172c37eda71e9c", - "_spec": "pg-pool@^3.4.1", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", - "author": { - "name": "Brian M. Carlson" + "scripts": { + "test": " node_modules/.bin/mocha" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-pool" }, + "keywords": [ + "pg", + "postgres", + "pool", + "database" + ], + "author": "Brian M. Carlson", + "license": "MIT", "bugs": { "url": "https://github.com/brianc/node-pg-pool/issues" }, - "bundleDependencies": false, - "deprecated": false, - "description": "Connection pool for node-postgres", + "homepage": "https://github.com/brianc/node-pg-pool#readme", "devDependencies": { "bluebird": "3.4.1", "co": "4.6.0", @@ -39,30 +34,12 @@ "mocha": "^7.1.2", "pg-cursor": "^1.3.0" }, - "directories": { - "test": "test" - }, - "gitHead": "92b4d37926c276d343bfe56447ff6f526af757cf", - "homepage": "https://github.com/brianc/node-pg-pool#readme", - "keywords": [ - "pg", - "postgres", - "pool", - "database" - ], - "license": "MIT", - "main": "index.js", - "name": "pg-pool", "peerDependencies": { "pg": ">=8.0" }, - "repository": { - "type": "git", - "url": "git://github.com/brianc/node-postgres.git", - "directory": "packages/pg-pool" - }, - "scripts": { - "test": " node_modules/.bin/mocha" - }, - "version": "3.4.1" -} + "gitHead": "92b4d37926c276d343bfe56447ff6f526af757cf" + +,"_resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz" +,"_integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==" +,"_from": "pg-pool@3.4.1" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-protocol/package.json b/reverse_engineering/node_modules/pg-protocol/package.json 
index 3a7ce52..527eac5 100644 --- a/reverse_engineering/node_modules/pg-protocol/package.json +++ b/reverse_engineering/node_modules/pg-protocol/package.json @@ -1,33 +1,10 @@ { - "_from": "pg-protocol@^1.5.0", - "_id": "pg-protocol@1.5.0", - "_inBundle": false, - "_integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==", - "_location": "/pg-protocol", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pg-protocol@^1.5.0", - "name": "pg-protocol", - "escapedName": "pg-protocol", - "rawSpec": "^1.5.0", - "saveSpec": null, - "fetchSpec": "^1.5.0" - }, - "_requiredBy": [ - "/pg" - ], - "_resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", - "_shasum": "b5dd452257314565e2d54ab3c132adc46565a6a0", - "_spec": "pg-protocol@^1.5.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", - "bugs": { - "url": "https://github.com/brianc/node-postgres/issues" - }, - "bundleDependencies": false, - "deprecated": false, + "name": "pg-protocol", + "version": "1.5.0", "description": "The postgres client/server binary protocol, implemented in TypeScript", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "license": "MIT", "devDependencies": { "@types/chai": "^4.2.7", "@types/mocha": "^5.2.7", @@ -38,27 +15,25 @@ "ts-node": "^8.5.4", "typescript": "^4.0.3" }, - "files": [ - "/dist/*{js,ts,map}", - "/src" - ], - "gitHead": "d45947938263bec30a1e3252452f04177b785f66", - "homepage": "https://github.com/brianc/node-postgres#readme", - "license": "MIT", - "main": "dist/index.js", - "name": "pg-protocol", - "repository": { - "type": "git", - "url": "git://github.com/brianc/node-postgres.git", - "directory": "packages/pg-protocol" - }, "scripts": { + "test": "mocha dist/**/*.test.js", "build": "tsc", "build:watch": "tsc --watch", "prepublish": "yarn build", - "pretest": "yarn build", - "test": "mocha dist/**/*.test.js" + 
"pretest": "yarn build" }, - "types": "dist/index.d.ts", - "version": "1.5.0" -} + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg-protocol" + }, + "files": [ + "/dist/*{js,ts,map}", + "/src" + ], + "gitHead": "d45947938263bec30a1e3252452f04177b785f66" + +,"_resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz" +,"_integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" +,"_from": "pg-protocol@1.5.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg-types/package.json b/reverse_engineering/node_modules/pg-types/package.json index 4a904aa..abfdb2c 100644 --- a/reverse_engineering/node_modules/pg-types/package.json +++ b/reverse_engineering/node_modules/pg-types/package.json @@ -1,43 +1,27 @@ { - "_from": "pg-types@^2.1.0", - "_id": "pg-types@2.2.0", - "_inBundle": false, - "_integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", - "_location": "/pg-types", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pg-types@^2.1.0", - "name": "pg-types", - "escapedName": "pg-types", - "rawSpec": "^2.1.0", - "saveSpec": null, - "fetchSpec": "^2.1.0" + "name": "pg-types", + "version": "2.2.0", + "description": "Query result type converters for node-postgres", + "main": "index.js", + "scripts": { + "test": "tape test/*.js | tap-spec && npm run test-ts", + "test-ts": "if-node-version '>= 8' tsd" }, - "_requiredBy": [ - "/pg" - ], - "_resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", - "_shasum": "2d0250d636454f7cfa3b6ae0382fdfa8063254a3", - "_spec": "pg-types@^2.1.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", - "author": { - "name": "Brian M. 
Carlson" + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-pg-types.git" }, + "keywords": [ + "postgres", + "PostgreSQL", + "pg" + ], + "author": "Brian M. Carlson", + "license": "MIT", "bugs": { "url": "https://github.com/brianc/node-pg-types/issues" }, - "bundleDependencies": false, - "dependencies": { - "pg-int8": "1.0.1", - "postgres-array": "~2.0.0", - "postgres-bytea": "~1.0.0", - "postgres-date": "~1.0.4", - "postgres-interval": "^1.1.0" - }, - "deprecated": false, - "description": "Query result type converters for node-postgres", + "homepage": "https://github.com/brianc/node-pg-types", "devDependencies": { "if-node-version": "^1.1.1", "pff": "^1.0.0", @@ -45,25 +29,18 @@ "tape": "^4.0.0", "tsd": "^0.7.4" }, + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, "engines": { "node": ">=4" - }, - "homepage": "https://github.com/brianc/node-pg-types", - "keywords": [ - "postgres", - "PostgreSQL", - "pg" - ], - "license": "MIT", - "main": "index.js", - "name": "pg-types", - "repository": { - "type": "git", - "url": "git://github.com/brianc/node-pg-types.git" - }, - "scripts": { - "test": "tape test/*.js | tap-spec && npm run test-ts", - "test-ts": "if-node-version '>= 8' tsd" - }, - "version": "2.2.0" -} + } + +,"_resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz" +,"_integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==" +,"_from": "pg-types@2.2.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pg/package.json b/reverse_engineering/node_modules/pg/package.json index 6c532d5..375ed9b 100644 --- a/reverse_engineering/node_modules/pg/package.json +++ b/reverse_engineering/node_modules/pg/package.json @@ -1,36 +1,24 @@ { - "_from": "pg", - "_id": "pg@8.7.1", - "_inBundle": false, - "_integrity": 
"sha512-7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==", - "_location": "/pg", - "_phantomChildren": {}, - "_requested": { - "type": "tag", - "registry": true, - "raw": "pg", - "name": "pg", - "escapedName": "pg", - "rawSpec": "", - "saveSpec": null, - "fetchSpec": "latest" - }, - "_requiredBy": [ - "#USER", - "/" + "name": "pg", + "version": "8.7.1", + "description": "PostgreSQL client - pure javascript & libpq with the same API", + "keywords": [ + "database", + "libpq", + "pg", + "postgre", + "postgres", + "postgresql", + "rdbms" ], - "_resolved": "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz", - "_shasum": "9ea9d1ec225980c36f94e181d009ab9f4ce4c471", - "_spec": "pg", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "author": { - "name": "Brian Carlson", - "email": "brian.m.carlson@gmail.com" - }, - "bugs": { - "url": "https://github.com/brianc/node-postgres/issues" + "homepage": "https://github.com/brianc/node-postgres", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git", + "directory": "packages/pg" }, - "bundleDependencies": false, + "author": "Brian Carlson ", + "main": "./lib", "dependencies": { "buffer-writer": "2.0.0", "packet-reader": "1.0.0", @@ -40,35 +28,12 @@ "pg-types": "^2.1.0", "pgpass": "1.x" }, - "deprecated": false, - "description": "PostgreSQL client - pure javascript & libpq with the same API", "devDependencies": { "async": "0.9.0", "bluebird": "3.5.2", "co": "4.6.0", "pg-copy-streams": "0.3.0" }, - "engines": { - "node": ">= 8.0.0" - }, - "files": [ - "lib", - "SPONSORS.md" - ], - "gitHead": "92b4d37926c276d343bfe56447ff6f526af757cf", - "homepage": "https://github.com/brianc/node-postgres", - "keywords": [ - "database", - "libpq", - "pg", - "postgre", - "postgres", - "postgresql", - "rdbms" - ], - "license": "MIT", - "main": "./lib", - "name": "pg", "peerDependencies": { "pg-native": ">=2.0.0" }, @@ -77,13 +42,20 @@ "optional": true } 
}, - "repository": { - "type": "git", - "url": "git://github.com/brianc/node-postgres.git", - "directory": "packages/pg" - }, "scripts": { "test": "make test-all" }, - "version": "8.7.1" -} + "files": [ + "lib", + "SPONSORS.md" + ], + "license": "MIT", + "engines": { + "node": ">= 8.0.0" + }, + "gitHead": "92b4d37926c276d343bfe56447ff6f526af757cf" + +,"_resolved": "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz" +,"_integrity": "sha512-7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==" +,"_from": "pg@8.7.1" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/pgpass/package.json b/reverse_engineering/node_modules/pgpass/package.json index d503bd0..187c07a 100644 --- a/reverse_engineering/node_modules/pgpass/package.json +++ b/reverse_engineering/node_modules/pgpass/package.json @@ -1,40 +1,20 @@ { - "_from": "pgpass@1.x", - "_id": "pgpass@1.0.4", - "_inBundle": false, - "_integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==", - "_location": "/pgpass", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "pgpass@1.x", - "name": "pgpass", - "escapedName": "pgpass", - "rawSpec": "1.x", - "saveSpec": null, - "fetchSpec": "1.x" - }, - "_requiredBy": [ - "/pg" - ], - "_resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz", - "_shasum": "85eb93a83800b20f8057a2b029bf05abaf94ea9c", - "_spec": "pgpass@1.x", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg", - "author": { - "name": "Hannes Hörl", - "email": "hannes.hoerl+pgpass@snowreporter.com" - }, - "bugs": { - "url": "https://github.com/hoegaarden/pgpass/issues" + "name": "pgpass", + "version": "1.0.4", + "description": "Module for reading .pgpass", + "main": "lib/index", + "scripts": { + "pretest": "chmod 600 ./test/_pgpass", + "_hint": "jshint --exclude node_modules --verbose lib test", + "_test": "mocha 
--recursive -R list", + "_covered_test": "nyc --reporter html --reporter text \"$npm_execpath\" run _test", + "test": "\"$npm_execpath\" run _hint && \"$npm_execpath\" run _covered_test" }, - "bundleDependencies": false, + "author": "Hannes Hörl ", + "license": "MIT", "dependencies": { "split2": "^3.1.1" }, - "deprecated": false, - "description": "Module for reading .pgpass", "devDependencies": { "jshint": "^2.12.0", "mocha": "^8.2.0", @@ -46,7 +26,6 @@ "tmp": "^0.2.1", "which": "^2.0.2" }, - "homepage": "https://github.com/hoegaarden/pgpass#readme", "keywords": [ "postgres", "pg", @@ -54,19 +33,13 @@ "password", "postgresql" ], - "license": "MIT", - "main": "lib/index", - "name": "pgpass", + "bugs": "https://github.com/hoegaarden/pgpass/issues", "repository": { "type": "git", - "url": "git+https://github.com/hoegaarden/pgpass.git" - }, - "scripts": { - "_covered_test": "nyc --reporter html --reporter text \"$npm_execpath\" run _test", - "_hint": "jshint --exclude node_modules --verbose lib test", - "_test": "mocha --recursive -R list", - "pretest": "chmod 600 ./test/_pgpass", - "test": "\"$npm_execpath\" run _hint && \"$npm_execpath\" run _covered_test" - }, - "version": "1.0.4" -} + "url": "https://github.com/hoegaarden/pgpass.git" + } + +,"_resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz" +,"_integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==" +,"_from": "pgpass@1.0.4" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/postgres-array/package.json b/reverse_engineering/node_modules/postgres-array/package.json index 3d72116..9048811 100644 --- a/reverse_engineering/node_modules/postgres-array/package.json +++ b/reverse_engineering/node_modules/postgres-array/package.json @@ -1,67 +1,39 @@ { - "_from": "postgres-array@~2.0.0", - "_id": "postgres-array@2.0.0", - "_inBundle": false, - "_integrity": 
"sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", - "_location": "/postgres-array", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "postgres-array@~2.0.0", - "name": "postgres-array", - "escapedName": "postgres-array", - "rawSpec": "~2.0.0", - "saveSpec": null, - "fetchSpec": "~2.0.0" - }, - "_requiredBy": [ - "/pg-types" - ], - "_resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", - "_shasum": "48f8fce054fbc69671999329b8834b772652d82e", - "_spec": "postgres-array@~2.0.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "name": "postgres-array", + "main": "index.js", + "version": "2.0.0", + "description": "Parse postgres array columns", + "license": "MIT", + "repository": "bendrucker/postgres-array", "author": { "name": "Ben Drucker", "email": "bvdrucker@gmail.com", "url": "bendrucker.me" }, - "bugs": { - "url": "https://github.com/bendrucker/postgres-array/issues" + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "standard && tape test.js" }, - "bundleDependencies": false, + "types": "index.d.ts", + "keywords": [ + "postgres", + "array", + "parser" + ], "dependencies": {}, - "deprecated": false, - "description": "Parse postgres array columns", "devDependencies": { "standard": "^12.0.1", "tape": "^4.0.0" }, - "engines": { - "node": ">=4" - }, "files": [ "index.js", "index.d.ts", "readme.md" - ], - "homepage": "https://github.com/bendrucker/postgres-array#readme", - "keywords": [ - "postgres", - "array", - "parser" - ], - "license": "MIT", - "main": "index.js", - "name": "postgres-array", - "repository": { - "type": "git", - "url": "git+https://github.com/bendrucker/postgres-array.git" - }, - "scripts": { - "test": "standard && tape test.js" - }, - "types": "index.d.ts", - "version": "2.0.0" -} + ] + +,"_resolved": 
"https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz" +,"_integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" +,"_from": "postgres-array@2.0.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/postgres-bytea/package.json b/reverse_engineering/node_modules/postgres-bytea/package.json index 5a00b5d..3b168d0 100644 --- a/reverse_engineering/node_modules/postgres-bytea/package.json +++ b/reverse_engineering/node_modules/postgres-bytea/package.json @@ -1,66 +1,38 @@ { - "_from": "postgres-bytea@~1.0.0", - "_id": "postgres-bytea@1.0.0", - "_inBundle": false, - "_integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=", - "_location": "/postgres-bytea", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "postgres-bytea@~1.0.0", - "name": "postgres-bytea", - "escapedName": "postgres-bytea", - "rawSpec": "~1.0.0", - "saveSpec": null, - "fetchSpec": "~1.0.0" - }, - "_requiredBy": [ - "/pg-types" - ], - "_resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", - "_shasum": "027b533c0aa890e26d172d47cf9ccecc521acd35", - "_spec": "postgres-bytea@~1.0.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "name": "postgres-bytea", + "main": "index.js", + "version": "1.0.0", + "description": "Postgres bytea parser", + "license": "MIT", + "repository": "bendrucker/postgres-bytea", "author": { "name": "Ben Drucker", "email": "bvdrucker@gmail.com", "url": "bendrucker.me" }, - "bugs": { - "url": "https://github.com/bendrucker/postgres-bytea/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "Postgres bytea parser", - "devDependencies": { - "standard": "^4.0.0", - "tape": "^4.0.0" - }, "engines": { "node": ">=0.10.0" }, - "files": [ - "index.js", - "readme.md" - ], - "homepage": 
"https://github.com/bendrucker/postgres-bytea#readme", + "scripts": { + "test": "standard && tape test.js" + }, "keywords": [ "bytea", "postgres", "binary", "parser" ], - "license": "MIT", - "main": "index.js", - "name": "postgres-bytea", - "repository": { - "type": "git", - "url": "git+https://github.com/bendrucker/postgres-bytea.git" - }, - "scripts": { - "test": "standard && tape test.js" + "dependencies": {}, + "devDependencies": { + "tape": "^4.0.0", + "standard": "^4.0.0" }, - "version": "1.0.0" -} + "files": [ + "index.js", + "readme.md" + ] + +,"_resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz" +,"_integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=" +,"_from": "postgres-bytea@1.0.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/postgres-date/package.json b/reverse_engineering/node_modules/postgres-date/package.json index 072f85c..dc614bd 100644 --- a/reverse_engineering/node_modules/postgres-date/package.json +++ b/reverse_engineering/node_modules/postgres-date/package.json @@ -1,65 +1,37 @@ { - "_from": "postgres-date@~1.0.4", - "_id": "postgres-date@1.0.7", - "_inBundle": false, - "_integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", - "_location": "/postgres-date", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "postgres-date@~1.0.4", - "name": "postgres-date", - "escapedName": "postgres-date", - "rawSpec": "~1.0.4", - "saveSpec": null, - "fetchSpec": "~1.0.4" - }, - "_requiredBy": [ - "/pg-types" - ], - "_resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", - "_shasum": "51bc086006005e5061c591cee727f2531bf641a8", - "_spec": "postgres-date@~1.0.4", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "name": "postgres-date", + "main": "index.js", + "version": "1.0.7", + "description": "Postgres date column parser", + 
"license": "MIT", + "repository": "bendrucker/postgres-date", "author": { "name": "Ben Drucker", "email": "bvdrucker@gmail.com", "url": "bendrucker.me" }, - "bugs": { - "url": "https://github.com/bendrucker/postgres-date/issues" - }, - "bundleDependencies": false, - "dependencies": {}, - "deprecated": false, - "description": "Postgres date column parser", - "devDependencies": { - "standard": "^14.0.0", - "tape": "^5.0.0" - }, "engines": { "node": ">=0.10.0" }, - "files": [ - "index.js", - "readme.md" - ], - "homepage": "https://github.com/bendrucker/postgres-date#readme", + "scripts": { + "test": "standard && tape test.js" + }, "keywords": [ "postgres", "date", "parser" ], - "license": "MIT", - "main": "index.js", - "name": "postgres-date", - "repository": { - "type": "git", - "url": "git+https://github.com/bendrucker/postgres-date.git" - }, - "scripts": { - "test": "standard && tape test.js" + "dependencies": {}, + "devDependencies": { + "standard": "^14.0.0", + "tape": "^5.0.0" }, - "version": "1.0.7" -} + "files": [ + "index.js", + "readme.md" + ] + +,"_resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz" +,"_integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==" +,"_from": "postgres-date@1.0.7" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/postgres-interval/package.json b/reverse_engineering/node_modules/postgres-interval/package.json index 50acc4b..192edb5 100644 --- a/reverse_engineering/node_modules/postgres-interval/package.json +++ b/reverse_engineering/node_modules/postgres-interval/package.json @@ -1,68 +1,40 @@ { - "_from": "postgres-interval@^1.1.0", - "_id": "postgres-interval@1.2.0", - "_inBundle": false, - "_integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", - "_location": "/postgres-interval", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - 
"raw": "postgres-interval@^1.1.0", - "name": "postgres-interval", - "escapedName": "postgres-interval", - "rawSpec": "^1.1.0", - "saveSpec": null, - "fetchSpec": "^1.1.0" - }, - "_requiredBy": [ - "/pg-types" - ], - "_resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", - "_shasum": "b460c82cb1587507788819a06aa0fffdb3544695", - "_spec": "postgres-interval@^1.1.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pg-types", + "name": "postgres-interval", + "main": "index.js", + "version": "1.2.0", + "description": "Parse Postgres interval columns", + "license": "MIT", + "repository": "bendrucker/postgres-interval", "author": { "name": "Ben Drucker", "email": "bvdrucker@gmail.com", "url": "bendrucker.me" }, - "bugs": { - "url": "https://github.com/bendrucker/postgres-interval/issues" - }, - "bundleDependencies": false, - "dependencies": { - "xtend": "^4.0.0" - }, - "deprecated": false, - "description": "Parse Postgres interval columns", - "devDependencies": { - "standard": "^12.0.1", - "tape": "^4.0.0" - }, "engines": { "node": ">=0.10.0" }, - "files": [ - "index.js", - "index.d.ts", - "readme.md" - ], - "homepage": "https://github.com/bendrucker/postgres-interval#readme", + "scripts": { + "test": "standard && tape test.js" + }, "keywords": [ "postgres", "interval", "parser" ], - "license": "MIT", - "main": "index.js", - "name": "postgres-interval", - "repository": { - "type": "git", - "url": "git+https://github.com/bendrucker/postgres-interval.git" + "dependencies": { + "xtend": "^4.0.0" }, - "scripts": { - "test": "standard && tape test.js" + "devDependencies": { + "tape": "^4.0.0", + "standard": "^12.0.1" }, - "version": "1.2.0" -} + "files": [ + "index.js", + "index.d.ts", + "readme.md" + ] + +,"_resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz" +,"_integrity": 
"sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==" +,"_from": "postgres-interval@1.2.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/readable-stream/package.json b/reverse_engineering/node_modules/readable-stream/package.json index 7878db8..3623e06 100644 --- a/reverse_engineering/node_modules/readable-stream/package.json +++ b/reverse_engineering/node_modules/readable-stream/package.json @@ -1,46 +1,16 @@ { - "_from": "readable-stream@^3.0.0", - "_id": "readable-stream@3.6.0", - "_inBundle": false, - "_integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "_location": "/readable-stream", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "readable-stream@^3.0.0", - "name": "readable-stream", - "escapedName": "readable-stream", - "rawSpec": "^3.0.0", - "saveSpec": null, - "fetchSpec": "^3.0.0" - }, - "_requiredBy": [ - "/split2" - ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "_shasum": "337bbda3adc0706bd3e024426a286d4b4b2c9198", - "_spec": "readable-stream@^3.0.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/split2", - "browser": { - "util": false, - "worker_threads": false, - "./errors": "./errors-browser.js", - "./readable.js": "./readable-browser.js", - "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", - "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" + "name": "readable-stream", + "version": "3.6.0", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" }, - "bundleDependencies": false, "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": 
"^1.0.1" }, - "deprecated": false, - "description": "Streams3, a user-land copy of the stream library from Node.js", "devDependencies": { "@babel/cli": "^7.2.0", "@babel/core": "^7.2.0", @@ -63,35 +33,40 @@ "tar-fs": "^1.16.2", "util-promisify": "^2.1.0" }, - "engines": { - "node": ">= 6" + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" }, - "homepage": "https://github.com/nodejs/readable-stream#readme", "keywords": [ "readable", "stream", "pipe" ], - "license": "MIT", - "main": "readable.js", - "name": "readable-stream", + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, "nyc": { "include": [ "lib/**.js" ] }, - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream.git" - }, - "scripts": { - "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", - "cover": "nyc npm test", - "report": "nyc report --reporter=lcov", - "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", - "test-browser-local": "airtap --open --local -- test/browser.js", - "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", - "update-browser-errors": "babel -o errors-browser.js errors.js" - }, - "version": "3.6.0" -} + "license": "MIT" + +,"_resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz" +,"_integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==" +,"_from": "readable-stream@3.6.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/safe-buffer/package.json b/reverse_engineering/node_modules/safe-buffer/package.json index 098d7d6..cea58cc 100644 --- a/reverse_engineering/node_modules/safe-buffer/package.json +++ b/reverse_engineering/node_modules/safe-buffer/package.json @@ -1,27 +1,7 @@ { - "_from": "safe-buffer@~5.2.0", - "_id": "safe-buffer@5.2.1", - "_inBundle": false, - "_integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "_location": "/safe-buffer", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "safe-buffer@~5.2.0", - "name": "safe-buffer", - "escapedName": "safe-buffer", - "rawSpec": "~5.2.0", - "saveSpec": null, - "fetchSpec": "~5.2.0" - }, - "_requiredBy": [ - "/string_decoder" - ], - "_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "_shasum": "1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6", - "_spec": "safe-buffer@~5.2.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/string_decoder", + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.1", "author": { "name": "Feross Aboukhadijeh", "email": "feross@feross.org", @@ -30,27 +10,10 @@ "bugs": { "url": "https://github.com/feross/safe-buffer/issues" }, - "bundleDependencies": false, - "deprecated": false, - "description": "Safer Node.js Buffer API", "devDependencies": { "standard": "*", "tape": "^5.0.0" }, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - 
], "homepage": "https://github.com/feross/safe-buffer", "keywords": [ "buffer", @@ -63,7 +26,7 @@ ], "license": "MIT", "main": "index.js", - "name": "safe-buffer", + "types": "index.d.ts", "repository": { "type": "git", "url": "git://github.com/feross/safe-buffer.git" @@ -71,6 +34,22 @@ "scripts": { "test": "standard && tape test/*.js" }, - "types": "index.d.ts", - "version": "5.2.1" -} + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + +,"_resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" +,"_integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" +,"_from": "safe-buffer@5.2.1" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/safer-buffer/package.json b/reverse_engineering/node_modules/safer-buffer/package.json index 895825b..4ce536b 100644 --- a/reverse_engineering/node_modules/safer-buffer/package.json +++ b/reverse_engineering/node_modules/safer-buffer/package.json @@ -1,41 +1,25 @@ { - "_args": [ - [ - "safer-buffer@2.1.2", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "safer-buffer@2.1.2", - "_id": "safer-buffer@2.1.2", - "_inBundle": false, - "_integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "_location": "/safer-buffer", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "safer-buffer@2.1.2", - "name": "safer-buffer", - "escapedName": "safer-buffer", - "rawSpec": "2.1.2", - "saveSpec": null, - "fetchSpec": "2.1.2" + "name": "safer-buffer", + "version": "2.1.2", + "description": "Modern Buffer API polyfill without footguns", + "main": "safer.js", + "scripts": { + "browserify-test": "browserify --external tape tests.js > 
browserify-tests.js && tape browserify-tests.js", + "test": "standard && tape tests.js" }, - "_requiredBy": [ - "/asn1" - ], - "_resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "_spec": "2.1.2", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", "author": { "name": "Nikita Skovoroda", "email": "chalkerx@gmail.com", "url": "https://github.com/ChALkeR" }, + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/ChALkeR/safer-buffer.git" + }, "bugs": { "url": "https://github.com/ChALkeR/safer-buffer/issues" }, - "description": "Modern Buffer API polyfill without footguns", "devDependencies": { "standard": "^11.0.1", "tape": "^4.9.0" @@ -46,18 +30,9 @@ "tests.js", "dangerous.js", "safer.js" - ], - "homepage": "https://github.com/ChALkeR/safer-buffer#readme", - "license": "MIT", - "main": "safer.js", - "name": "safer-buffer", - "repository": { - "type": "git", - "url": "git+https://github.com/ChALkeR/safer-buffer.git" - }, - "scripts": { - "browserify-test": "browserify --external tape tests.js > browserify-tests.js && tape browserify-tests.js", - "test": "standard && tape tests.js" - }, - "version": "2.1.2" -} + ] + +,"_resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" +,"_integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" +,"_from": "safer-buffer@2.1.2" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/semver/package.json b/reverse_engineering/node_modules/semver/package.json index 3d1a79f..31e12dd 100644 --- a/reverse_engineering/node_modules/semver/package.json +++ b/reverse_engineering/node_modules/semver/package.json @@ -1,63 +1,32 @@ { - "_args": [ - [ - "semver@5.7.1", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "semver@5.7.1", - "_id": "semver@5.7.1", - "_inBundle": false, - "_integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "_location": "/semver", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "semver@5.7.1", - "name": "semver", - "escapedName": "semver", - "rawSpec": "5.7.1", - "saveSpec": null, - "fetchSpec": "5.7.1" - }, - "_requiredBy": [ - "/ssh2-streams" - ], - "_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "_spec": "5.7.1", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "bin": { - "semver": "bin/semver" - }, - "bugs": { - "url": "https://github.com/npm/node-semver/issues" - }, + "name": "semver", + "version": "5.7.1", "description": "The semantic version parser used by npm.", + "main": "semver.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, "devDependencies": { "tap": "^13.0.0-rc.18" }, + "license": "ISC", + "repository": "https://github.com/npm/node-semver", + "bin": { + "semver": "./bin/semver" + }, "files": [ "bin", "range.bnf", "semver.js" ], - "homepage": "https://github.com/npm/node-semver#readme", - "license": "ISC", - "main": "semver.js", - "name": "semver", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/node-semver.git" - }, - "scripts": { - "postpublish": "git push origin --all; git push origin --tags", - "postversion": "npm publish", - "preversion": "npm test", - "test": "tap" - }, "tap": { "check-coverage": true - }, - "version": "5.7.1" -} + } + +,"_resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz" +,"_integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" +,"_from": "semver@5.7.1" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/split2/package.json b/reverse_engineering/node_modules/split2/package.json index 
1f20e1d..1e31f4a 100644 --- a/reverse_engineering/node_modules/split2/package.json +++ b/reverse_engineering/node_modules/split2/package.json @@ -1,40 +1,29 @@ { - "_from": "split2@^3.1.1", - "_id": "split2@3.2.2", - "_inBundle": false, - "_integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", - "_location": "/split2", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "split2@^3.1.1", - "name": "split2", - "escapedName": "split2", - "rawSpec": "^3.1.1", - "saveSpec": null, - "fetchSpec": "^3.1.1" + "name": "split2", + "version": "3.2.2", + "description": "split a Text Stream into a Line Stream, using Stream 3", + "main": "index.js", + "scripts": { + "lint": "standard --verbose", + "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test.js", + "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js", + "test:report": "npm run lint && npm run unit:report", + "test": "npm run lint && npm run unit", + "legacy": "tape test.js" }, - "_requiredBy": [ - "/pgpass" + "pre-commit": [ + "test" ], - "_resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", - "_shasum": "bf2cf2a37d838312c249c89206fd7a17dd12365f", - "_spec": "split2@^3.1.1", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/pgpass", - "author": { - "name": "Matteo Collina", - "email": "hello@matteocollina.com" + "website": "https://github.com/mcollina/split2", + "repository": { + "type": "git", + "url": "https://github.com/mcollina/split2.git" }, "bugs": { "url": "http://github.com/mcollina/split2/issues" }, - "bundleDependencies": false, - "dependencies": { - "readable-stream": "^3.0.0" - }, - "deprecated": false, - "description": "split a Text Stream into a Line Stream, using Stream 3", + "author": "Matteo Collina ", + "license": "ISC", "devDependencies": { "binary-split": "^1.0.3", 
"callback-stream": "^1.1.0", @@ -45,25 +34,11 @@ "standard": "^14.0.0", "tape": "^5.0.0" }, - "homepage": "https://github.com/mcollina/split2#readme", - "license": "ISC", - "main": "index.js", - "name": "split2", - "pre-commit": [ - "test" - ], - "repository": { - "type": "git", - "url": "git+https://github.com/mcollina/split2.git" - }, - "scripts": { - "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js", - "legacy": "tape test.js", - "lint": "standard --verbose", - "test": "npm run lint && npm run unit", - "test:report": "npm run lint && npm run unit:report", - "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test.js" - }, - "version": "3.2.2", - "website": "https://github.com/mcollina/split2" -} + "dependencies": { + "readable-stream": "^3.0.0" + } + +,"_resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz" +,"_integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==" +,"_from": "split2@3.2.2" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/ssh2-streams/package.json b/reverse_engineering/node_modules/ssh2-streams/package.json index 396c3cc..c14e8bc 100644 --- a/reverse_engineering/node_modules/ssh2-streams/package.json +++ b/reverse_engineering/node_modules/ssh2-streams/package.json @@ -1,73 +1,22 @@ -{ - "_args": [ - [ - "ssh2-streams@0.1.20", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "ssh2-streams@0.1.20", - "_id": "ssh2-streams@0.1.20", - "_inBundle": false, - "_integrity": "sha1-URGNFUVV31Rp7h9n4M8efoosDjo=", - "_location": "/ssh2-streams", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "ssh2-streams@0.1.20", - "name": "ssh2-streams", - "escapedName": "ssh2-streams", - "rawSpec": "0.1.20", - "saveSpec": null, - "fetchSpec": "0.1.20" - }, - "_requiredBy": [ - "/ssh2" - ], - "_resolved": 
"https://registry.npmjs.org/ssh2-streams/-/ssh2-streams-0.1.20.tgz", - "_spec": "0.1.20", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "author": { - "name": "Brian White", - "email": "mscdex@mscdex.net" - }, - "bugs": { - "url": "https://github.com/mscdex/ssh2-streams/issues" - }, - "dependencies": { - "asn1": "~0.2.0", - "semver": "^5.1.0", - "streamsearch": "~0.1.2" - }, +{ "name": "ssh2-streams", + "version": "0.1.20", + "author": "Brian White ", "description": "SSH2 and SFTP(v3) client/server protocol streams for node.js", - "engines": { - "node": ">=0.10.0" - }, - "homepage": "https://github.com/mscdex/ssh2-streams#readme", - "keywords": [ - "ssh", - "ssh2", - "sftp", - "secure", - "protocol", - "streams", - "client", - "server" - ], - "licenses": [ - { - "type": "MIT", - "url": "http://github.com/mscdex/ssh2-streams/raw/master/LICENSE" - } - ], "main": "./index", - "name": "ssh2-streams", - "repository": { - "type": "git", - "url": "git+ssh://git@github.com/mscdex/ssh2-streams.git" + "engines": { "node": ">=0.10.0" }, + "dependencies": { + "streamsearch": "~0.1.2", + "asn1": "~0.2.0", + "semver": "^5.1.0" }, "scripts": { "test": "node test/test.js" }, - "version": "0.1.20" -} + "keywords": [ "ssh", "ssh2", "sftp", "secure", "protocol", "streams", "client", "server" ], + "licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/ssh2-streams/raw/master/LICENSE" } ], + "repository" : { "type": "git", "url": "http://github.com/mscdex/ssh2-streams.git" } + +,"_resolved": "https://registry.npmjs.org/ssh2-streams/-/ssh2-streams-0.1.20.tgz" +,"_integrity": "sha1-URGNFUVV31Rp7h9n4M8efoosDjo=" +,"_from": "ssh2-streams@0.1.20" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/ssh2/package.json b/reverse_engineering/node_modules/ssh2/package.json index e3a3f4d..7c7dd13 100644 --- a/reverse_engineering/node_modules/ssh2/package.json +++ b/reverse_engineering/node_modules/ssh2/package.json @@ -1,74 +1,23 
@@ -{ - "_args": [ - [ - "ssh2@0.5.4", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "ssh2@0.5.4", - "_id": "ssh2@0.5.4", - "_inBundle": false, - "_integrity": "sha1-G/a2soyW6u8mf01sRqWiUXpZnic=", - "_location": "/ssh2", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "ssh2@0.5.4", - "name": "ssh2", - "escapedName": "ssh2", - "rawSpec": "0.5.4", - "saveSpec": null, - "fetchSpec": "0.5.4" - }, - "_requiredBy": [ - "/tunnel-ssh" - ], - "_resolved": "https://registry.npmjs.org/ssh2/-/ssh2-0.5.4.tgz", - "_spec": "0.5.4", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "author": { - "name": "Brian White", - "email": "mscdex@mscdex.net" - }, - "bugs": { - "url": "https://github.com/mscdex/ssh2/issues" - }, +{ "name": "ssh2", + "version": "0.5.4", + "author": "Brian White ", + "description": "SSH2 client and server modules written in pure JavaScript for node.js", + "main": "./lib/client", + "engines": { "node": ">=0.10.0" }, "dependencies": { "ssh2-streams": "~0.1.15" }, - "description": "SSH2 client and server modules written in pure JavaScript for node.js", "devDependencies": { "semver": "^5.1.0" }, - "engines": { - "node": ">=0.10.0" - }, - "homepage": "https://github.com/mscdex/ssh2#readme", - "keywords": [ - "ssh", - "ssh2", - "sftp", - "secure", - "shell", - "exec", - "remote", - "client" - ], - "licenses": [ - { - "type": "MIT", - "url": "http://github.com/mscdex/ssh2/raw/master/LICENSE" - } - ], - "main": "./lib/client", - "name": "ssh2", - "repository": { - "type": "git", - "url": "git+ssh://git@github.com/mscdex/ssh2.git" - }, "scripts": { "test": "node test/test.js" }, - "version": "0.5.4" -} + "keywords": [ "ssh", "ssh2", "sftp", "secure", "shell", "exec", "remote", "client" ], + "licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/ssh2/raw/master/LICENSE" } ], + "repository" : { "type": "git", "url": 
"http://github.com/mscdex/ssh2.git" } + +,"_resolved": "https://registry.npmjs.org/ssh2/-/ssh2-0.5.4.tgz" +,"_integrity": "sha1-G/a2soyW6u8mf01sRqWiUXpZnic=" +,"_from": "ssh2@0.5.4" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/streamsearch/package.json b/reverse_engineering/node_modules/streamsearch/package.json index 9dde543..2a4916a 100644 --- a/reverse_engineering/node_modules/streamsearch/package.json +++ b/reverse_engineering/node_modules/streamsearch/package.json @@ -1,62 +1,14 @@ -{ - "_args": [ - [ - "streamsearch@0.1.2", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "streamsearch@0.1.2", - "_id": "streamsearch@0.1.2", - "_inBundle": false, - "_integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=", - "_location": "/streamsearch", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "streamsearch@0.1.2", - "name": "streamsearch", - "escapedName": "streamsearch", - "rawSpec": "0.1.2", - "saveSpec": null, - "fetchSpec": "0.1.2" - }, - "_requiredBy": [ - "/ssh2-streams" - ], - "_resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", - "_spec": "0.1.2", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", - "author": { - "name": "Brian White", - "email": "mscdex@mscdex.net" - }, - "bugs": { - "url": "https://github.com/mscdex/streamsearch/issues" - }, +{ "name": "streamsearch", + "version": "0.1.2", + "author": "Brian White ", "description": "Streaming Boyer-Moore-Horspool searching for node.js", - "engines": { - "node": ">=0.8.0" - }, - "homepage": "https://github.com/mscdex/streamsearch#readme", - "keywords": [ - "stream", - "horspool", - "boyer-moore-horspool", - "boyer-moore", - "search" - ], - "licenses": [ - { - "type": "MIT", - "url": "http://github.com/mscdex/streamsearch/raw/master/LICENSE" - } - ], "main": "./lib/sbmh", - "name": "streamsearch", - "repository": { - "type": "git", - "url": 
"git+ssh://git@github.com/mscdex/streamsearch.git" - }, - "version": "0.1.2" -} + "engines": { "node" : ">=0.8.0" }, + "keywords": [ "stream", "horspool", "boyer-moore-horspool", "boyer-moore", "search" ], + "licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/streamsearch/raw/master/LICENSE" } ], + "repository": { "type": "git", "url": "http://github.com/mscdex/streamsearch.git" } + +,"_resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz" +,"_integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=" +,"_from": "streamsearch@0.1.2" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/string_decoder/package.json b/reverse_engineering/node_modules/string_decoder/package.json index 4e6db1b..8f52781 100644 --- a/reverse_engineering/node_modules/string_decoder/package.json +++ b/reverse_engineering/node_modules/string_decoder/package.json @@ -1,45 +1,28 @@ { - "_from": "string_decoder@^1.1.1", - "_id": "string_decoder@1.3.0", - "_inBundle": false, - "_integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "_location": "/string_decoder", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "string_decoder@^1.1.1", - "name": "string_decoder", - "escapedName": "string_decoder", - "rawSpec": "^1.1.1", - "saveSpec": null, - "fetchSpec": "^1.1.1" - }, - "_requiredBy": [ - "/readable-stream" + "name": "string_decoder", + "version": "1.3.0", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "files": [ + "lib" ], - "_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "_shasum": "42f114594a46cf1a8e30b0a84f56c78c3edac21e", - "_spec": "string_decoder@^1.1.1", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/readable-stream", - "bugs": { - "url": "https://github.com/nodejs/string_decoder/issues" - }, - 
"bundleDependencies": false, "dependencies": { "safe-buffer": "~5.2.0" }, - "deprecated": false, - "description": "The string_decoder module from Node core", "devDependencies": { "babel-polyfill": "^6.23.0", "core-util-is": "^1.0.2", "inherits": "^2.0.3", "tap": "~0.4.8" }, - "files": [ - "lib" - ], + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, "homepage": "https://github.com/nodejs/string_decoder", "keywords": [ "string", @@ -47,16 +30,9 @@ "browser", "browserify" ], - "license": "MIT", - "main": "lib/string_decoder.js", - "name": "string_decoder", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/string_decoder.git" - }, - "scripts": { - "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", - "test": "tap test/parallel/*.js && node test/verify-dependencies" - }, - "version": "1.3.0" -} + "license": "MIT" + +,"_resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" +,"_integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==" +,"_from": "string_decoder@1.3.0" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/tunnel-ssh/package.json b/reverse_engineering/node_modules/tunnel-ssh/package.json index adf4939..c693857 100644 --- a/reverse_engineering/node_modules/tunnel-ssh/package.json +++ b/reverse_engineering/node_modules/tunnel-ssh/package.json @@ -1,45 +1,32 @@ { - "_args": [ - [ - "tunnel-ssh@4.1.4", - "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering" - ] - ], - "_from": "tunnel-ssh@4.1.4", - "_id": "tunnel-ssh@4.1.4", - "_inBundle": false, - "_integrity": 
"sha512-CjBqboGvAbM7iXSX2F95kzoI+c2J81YkrHbyyo4SWNKCzU6w5LfEvXBCHu6PPriYaNvfhMKzD8bFf5Vl14YTtg==", - "_location": "/tunnel-ssh", - "_phantomChildren": {}, - "_requested": { - "type": "version", - "registry": true, - "raw": "tunnel-ssh@4.1.4", - "name": "tunnel-ssh", - "escapedName": "tunnel-ssh", - "rawSpec": "4.1.4", - "saveSpec": null, - "fetchSpec": "4.1.4" + "name": "tunnel-ssh", + "version": "4.1.4", + "description": "Easy extendable SSH tunnel", + "main": "index.js", + "scripts": { + "test": "mocha && eslint ." + }, + "repository": { + "type": "git", + "url": "https://github.com/agebrock/tunnel-ssh" }, - "_requiredBy": [ - "/" + "keywords": [ + "tunnel", + "ssh", + "mysql", + "develop", + "net" ], - "_resolved": "https://registry.npmjs.org/tunnel-ssh/-/tunnel-ssh-4.1.4.tgz", - "_spec": "4.1.4", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering", "author": { "name": "Christoph Hagenbrock", "email": "christoph.hagenbrock@googlemail.com" }, - "bugs": { - "url": "https://github.com/agebrock/tunnel-ssh/issues" - }, + "license": "MIT", "dependencies": { "debug": "2.6.9", "lodash.defaults": "^4.1.0", "ssh2": "0.5.4" }, - "description": "Easy extendable SSH tunnel", "devDependencies": { "chai": "3.5.0", "eslint": "^3.2.2", @@ -57,24 +44,9 @@ 4 ] } - }, - "homepage": "https://github.com/agebrock/tunnel-ssh#readme", - "keywords": [ - "tunnel", - "ssh", - "mysql", - "develop", - "net" - ], - "license": "MIT", - "main": "index.js", - "name": "tunnel-ssh", - "repository": { - "type": "git", - "url": "git+https://github.com/agebrock/tunnel-ssh.git" - }, - "scripts": { - "test": "mocha && eslint ." 
- }, - "version": "4.1.4" -} + } + +,"_resolved": "https://registry.npmjs.org/tunnel-ssh/-/tunnel-ssh-4.1.4.tgz" +,"_integrity": "sha512-CjBqboGvAbM7iXSX2F95kzoI+c2J81YkrHbyyo4SWNKCzU6w5LfEvXBCHu6PPriYaNvfhMKzD8bFf5Vl14YTtg==" +,"_from": "tunnel-ssh@4.1.4" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/util-deprecate/package.json b/reverse_engineering/node_modules/util-deprecate/package.json index 210abf3..5d06210 100644 --- a/reverse_engineering/node_modules/util-deprecate/package.json +++ b/reverse_engineering/node_modules/util-deprecate/package.json @@ -1,40 +1,16 @@ { - "_from": "util-deprecate@^1.0.1", - "_id": "util-deprecate@1.0.2", - "_inBundle": false, - "_integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", - "_location": "/util-deprecate", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "util-deprecate@^1.0.1", - "name": "util-deprecate", - "escapedName": "util-deprecate", - "rawSpec": "^1.0.1", - "saveSpec": null, - "fetchSpec": "^1.0.1" - }, - "_requiredBy": [ - "/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "_shasum": "450d4dc9fa70de732762fbd2d4a28981419a0ccf", - "_spec": "util-deprecate@^1.0.1", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/readable-stream", - "author": { - "name": "Nathan Rajlich", - "email": "nathan@tootallnate.net", - "url": "http://n8.io/" - }, + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", "browser": "browser.js", - "bugs": { - "url": "https://github.com/TooTallNate/util-deprecate/issues" + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" }, - "bundleDependencies": false, - "deprecated": false, - "description": "The Node.js 
`util.deprecate()` function with browser support", - "homepage": "https://github.com/TooTallNate/util-deprecate", "keywords": [ "util", "deprecate", @@ -42,15 +18,14 @@ "browser", "node" ], + "author": "Nathan Rajlich (http://n8.io/)", "license": "MIT", - "main": "node.js", - "name": "util-deprecate", - "repository": { - "type": "git", - "url": "git://github.com/TooTallNate/util-deprecate.git" - }, - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" }, - "version": "1.0.2" -} + "homepage": "https://github.com/TooTallNate/util-deprecate" + +,"_resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" +,"_integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" +,"_from": "util-deprecate@1.0.2" +} \ No newline at end of file diff --git a/reverse_engineering/node_modules/xtend/package.json b/reverse_engineering/node_modules/xtend/package.json index 2d4046d..88919a0 100644 --- a/reverse_engineering/node_modules/xtend/package.json +++ b/reverse_engineering/node_modules/xtend/package.json @@ -1,54 +1,7 @@ { - "_from": "xtend@^4.0.0", - "_id": "xtend@4.0.2", - "_inBundle": false, - "_integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", - "_location": "/xtend", - "_phantomChildren": {}, - "_requested": { - "type": "range", - "registry": true, - "raw": "xtend@^4.0.0", - "name": "xtend", - "escapedName": "xtend", - "rawSpec": "^4.0.0", - "saveSpec": null, - "fetchSpec": "^4.0.0" - }, - "_requiredBy": [ - "/postgres-interval" - ], - "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "_shasum": "bb72779f5fa465186b1f438f674fa347fdb5db54", - "_spec": "xtend@^4.0.0", - "_where": "/home/vitalii/.hackolade/plugins/PostgreSQL/reverse_engineering/node_modules/postgres-interval", - "author": { - "name": "Raynos", - "email": "raynos2@gmail.com" - }, - "bugs": { - "url": 
"https://github.com/Raynos/xtend/issues", - "email": "raynos2@gmail.com" - }, - "bundleDependencies": false, - "contributors": [ - { - "name": "Jake Verbaten" - }, - { - "name": "Matt Esch" - } - ], - "dependencies": {}, - "deprecated": false, + "name": "xtend", + "version": "4.0.2", "description": "extend like a boss", - "devDependencies": { - "tape": "~1.1.0" - }, - "engines": { - "node": ">=0.4" - }, - "homepage": "https://github.com/Raynos/xtend", "keywords": [ "extend", "merge", @@ -57,16 +10,30 @@ "object", "array" ], - "license": "MIT", + "author": "Raynos ", + "repository": "git://github.com/Raynos/xtend.git", "main": "immutable", - "name": "xtend", - "repository": { - "type": "git", - "url": "git://github.com/Raynos/xtend.git" - }, "scripts": { "test": "node test" }, + "dependencies": {}, + "devDependencies": { + "tape": "~1.1.0" + }, + "homepage": "https://github.com/Raynos/xtend", + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "license": "MIT", "testling": { "files": "test.js", "browsers": [ @@ -82,5 +49,11 @@ "iphone/6.0..latest" ] }, - "version": "4.0.2" -} + "engines": { + "node": ">=0.4" + } + +,"_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" +,"_integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" +,"_from": "xtend@4.0.2" +} \ No newline at end of file diff --git a/reverse_engineering/package-lock.json b/reverse_engineering/package-lock.json index ea1046c..37da0eb 100644 --- a/reverse_engineering/package-lock.json +++ b/reverse_engineering/package-lock.json @@ -1,5 +1,5 @@ { - "name": "MSSQLServer", + "name": "PostgreSQL", "version": "1.0.0", "lockfileVersion": 1, "requires": true, diff --git a/reverse_engineering/package.json b/reverse_engineering/package.json index 91a0a68..5787c42 100644 --- a/reverse_engineering/package.json +++ 
b/reverse_engineering/package.json @@ -1,5 +1,5 @@ { - "name": "MSSQLServer", + "name": "PostgreSQL", "version": "1.0.0", "description": "", "author": "Hackolade", diff --git a/types/multirange.json b/types/multirange.json index e38ad82..a9e46c3 100644 --- a/types/multirange.json +++ b/types/multirange.json @@ -11,7 +11,7 @@ "childRelationships": [], "foreignField": [], "enum": [], - "mode": "int4range" + "mode": "int4multirange" }, "descriptor": [ { From e21e73d0517007dbd9f2fb080fd102f4b29185a3 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 15 Oct 2021 18:24:33 +0300 Subject: [PATCH 56/69] Added support to inherit multiple tables --- forward_engineering/ddlProvider.js | 31 ++++++++++++------- .../entity_level/entityLevelConfig.json | 18 ++++++++--- reverse_engineering/api.js | 28 +++++++++-------- .../helpers/postgresHelpers/tableHelper.js | 17 ++++++---- .../helpers/postgresService.js | 11 +++++-- 5 files changed, 66 insertions(+), 39 deletions(-) diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index c600c60..2236046 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -75,15 +75,16 @@ module.exports = (baseProvider, options, app) => { getColumnsList, }); - const { decorateType, decorateDefault, getColumnComments, replaceTypeByVersion } = require('./helpers/columnDefinitionHelper')({ - _, - wrap, - assignTemplates, - templates, - commentIfDeactivated, - getNamePrefixedWithSchemaName, - wrapComment, - }); + const { decorateType, decorateDefault, getColumnComments, replaceTypeByVersion } = + require('./helpers/columnDefinitionHelper')({ + _, + wrap, + assignTemplates, + templates, + commentIfDeactivated, + getNamePrefixedWithSchemaName, + wrapComment, + }); return { createDatabase({ databaseName, ifNotExist, comments, udfs, procedures }) { @@ -462,20 +463,26 @@ module.exports = (baseProvider, options, app) => { comments: containerData.description, udfs: data?.udfs || [], 
procedures: data?.procedures || [], - dbVersion + dbVersion, }; }, hydrateTable({ tableData, entityData, jsonSchema }) { const detailsTab = entityData[0]; - const inheritsTable = _.get(tableData, `relatedSchemas[${detailsTab.inherits}]`, ''); + const parentTables = _.chain(detailsTab.inherits) + .map(({ parentTable }) => _.get(tableData, `relatedSchemas[${parentTable}]`, '')) + .compact() + .map(table => table.code || table.collectionName) + .join(', ') + .thru(value => (value ? `(${value})` : '')) + .value(); const partitioning = _.first(detailsTab.partitioning) || {}; const compositePartitionKey = keyHelper.getKeys(partitioning.compositePartitionKey, jsonSchema); return { ...tableData, keyConstraints: keyHelper.getTableKeyConstraints(jsonSchema), - inherits: inheritsTable?.code || inheritsTable?.collectionName, + inherits: parentTables, selectStatement: _.trim(detailsTab.selectStatement), partitioning: _.assign({}, partitioning, { compositePartitionKey }), ..._.pick( diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index e60d0db..a586772 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -143,13 +143,21 @@ making sure that you maintain a proper JSON format. "defaultValue": true }, { - "propertyName": "Inherits parent table", + "propertyName": "Inherits parent tables", "propertyKeyword": "inherits", + "propertyType": "group", "propertyTooltip": "To create a table with the same definition as another table, including columns, indexes, and table options. 
Foreign key definitions, as well as any DATA DIRECTORY or INDEX DIRECTORY table options specified on the original table, will NOT be created.", - "propertyType": "selecthashed", - "template": "entities", - "withEmptyOption": true, - "excludeCurrent": true + "structure": [ + { + "propertyName": "Table name", + "propertyKeyword": "parentTable", + "propertyTooltip": "To create a table with the same definition as another table, including columns, indexes, and table options. Foreign key definitions, as well as any DATA DIRECTORY or INDEX DIRECTORY table options specified on the original table, will NOT be created.", + "propertyType": "selecthashed", + "template": "entities", + "withEmptyOption": true, + "excludeCurrent": true + } + ] }, { "propertyName": "Partitioning", diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 04dc6a4..df690bc 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -127,19 +127,21 @@ module.exports = { Procedures: procedures, }; - const tablePackages = tables.map(entityData => ({ - dbName: schemaName, - collectionName: entityData.name, - documents: entityData.documents, - views: [], - emptyBucket: false, - entityLevel: entityData.entityLevel, - validation: { - jsonSchema: entityData.jsonSchema, - }, - bucketInfo, - modelDefinitions, - })); + const tablePackages = tables + .map(entityData => ({ + dbName: schemaName, + collectionName: entityData.name, + documents: entityData.documents, + views: [], + emptyBucket: false, + entityLevel: entityData.entityLevel, + validation: { + jsonSchema: entityData.jsonSchema, + }, + bucketInfo, + modelDefinitions, + })) + .sort(data => (app.require('lodash').isEmpty(data.entityLevel.inherits) ? 
-1 : 1)); if (views?.length) { const viewPackage = { diff --git a/reverse_engineering/helpers/postgresHelpers/tableHelper.js b/reverse_engineering/helpers/postgresHelpers/tableHelper.js index 70163c4..7ebfee1 100644 --- a/reverse_engineering/helpers/postgresHelpers/tableHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/tableHelper.js @@ -17,7 +17,7 @@ const prepareStorageParameters = (reloptions, tableToastOptions) => { const fillfactor = options.fillfactor; const parallel_workers = options.parallel_workers; const autovacuum_enabled = options.autovacuum_enabled; - const autovacuum = { + const autovacuum = clearEmptyPropertiesInObject({ vacuum_index_cleanup: options.vacuum_index_cleanup, vacuum_truncate: options.vacuum_truncate, autovacuum_vacuum_threshold: options.autovacuum_vacuum_threshold, @@ -35,10 +35,10 @@ const prepareStorageParameters = (reloptions, tableToastOptions) => { autovacuum_multixact_freeze_max_age: options.autovacuum_multixact_freeze_max_age, autovacuum_multixact_freeze_table_age: options.autovacuum_multixact_freeze_table_age, log_autovacuum_min_duration: options.log_autovacuum_min_duration, - }; + }); const user_catalog_table = options.user_catalog_table; const toast_autovacuum_enabled = toastOptions.autovacuum_enabled; - const toast = { + const toast = clearEmptyPropertiesInObject({ toast_tuple_target: options.toast_tuple_target, toast_vacuum_index_cleanup: toastOptions.vacuum_index_cleanup, toast_vacuum_truncate: toastOptions.vacuum_truncate, @@ -55,15 +55,15 @@ const prepareStorageParameters = (reloptions, tableToastOptions) => { toast_autovacuum_multixact_freeze_max_age: toastOptions.autovacuum_multixact_freeze_max_age, toast_autovacuum_multixact_freeze_table_age: toastOptions.autovacuum_multixact_freeze_table_age, toast_log_autovacuum_min_duration: toastOptions.log_autovacuum_min_duration, - }; + }); const storage_parameter = { fillfactor, parallel_workers, autovacuum_enabled, - autovacuum: 
clearEmptyPropertiesInObject(autovacuum), + autovacuum: _.isEmpty(autovacuum) ? null : autovacuum, toast_autovacuum_enabled, - toast: clearEmptyPropertiesInObject(toast), + toast: _.isEmpty(toast) ? null : toast, user_catalog_table, }; @@ -334,6 +334,10 @@ const prepareOptions = options => { ); }; +const prepareTableInheritance = (schemaName, inheritanceResult) => { + return _.map(inheritanceResult, ({ parent_table_name }) => ({ parentTable: [schemaName, parent_table_name] })); +}; + module.exports = { prepareStorageParameters, prepareTablePartition, @@ -343,4 +347,5 @@ module.exports = { prepareTableLevelData, prepareTableIndexes, getLimit, + prepareTableInheritance, }; diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index f0b2c1a..42b8fdb 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -27,6 +27,7 @@ const { getLimit, prepareTableLevelData, prepareTableIndexes, + prepareTableInheritance, } = require('./postgresHelpers/tableHelper'); const { setDependencies: setDependenciesInUserDefinedTypesHelper, @@ -212,11 +213,15 @@ module.exports = { ); const tableOid = tableLevelData?.oid; - const tableToastOptions = await db.queryTolerant(queryConstants.GET_TABLE_TOAST_OPTIONS, [tableName, schemaOid], true); + const tableToastOptions = await db.queryTolerant( + queryConstants.GET_TABLE_TOAST_OPTIONS, + [tableName, schemaOid], + true + ); const partitionResult = await db.queryTolerant(queryConstants.GET_TABLE_PARTITION_DATA, [tableOid], true); const tableColumns = await this._getTableColumns(tableName, schemaName, tableOid); const descriptionResult = await db.queryTolerant(queryConstants.GET_DESCRIPTION_BY_OID, [tableOid], true); - const inheritsResult = await db.queryTolerant(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid], true); + const inheritsResult = await db.queryTolerant(queryConstants.GET_INHERITS_PARENT_TABLE_NAME, [tableOid]); 
const tableConstraintsResult = await db.queryTolerant(queryConstants.GET_TABLE_CONSTRAINTS, [tableOid]); const tableIndexesResult = await db.queryTolerant(queryConstants.GET_TABLE_INDEXES, [tableOid]); const tableForeignKeys = await db.queryTolerant(queryConstants.GET_TABLE_FOREIGN_KEYS, [tableOid]); @@ -224,7 +229,7 @@ module.exports = { const partitioning = prepareTablePartition(partitionResult, tableColumns); const tableLevelProperties = prepareTableLevelData(tableLevelData, tableToastOptions); const description = getDescriptionFromResult(descriptionResult); - const inherits = inheritsResult?.parent_table_name ? [schemaName, inheritsResult?.parent_table_name] : null; + const inherits = prepareTableInheritance(schemaName, inheritsResult); const tableConstraint = prepareTableConstraints(tableConstraintsResult, tableColumns); const tableIndexes = prepareTableIndexes(tableIndexesResult); const relationships = prepareForeignKeys(tableForeignKeys, tableName, schemaName, tableColumns); From acbe4697319973a47aa5ba54d28a36d9070c2480 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 18 Oct 2021 14:18:39 +0300 Subject: [PATCH 57/69] RE: fixed issue issue with re of and properties --- reverse_engineering/helpers/getJsonSchema.js | 6 +++++- reverse_engineering/helpers/postgresHelpers/columnHelper.js | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/helpers/getJsonSchema.js b/reverse_engineering/helpers/getJsonSchema.js index 0669bb3..c846b37 100644 --- a/reverse_engineering/helpers/getJsonSchema.js +++ b/reverse_engineering/helpers/getJsonSchema.js @@ -16,7 +16,11 @@ const getJsonSchema = columns => { }; }, {}); - return { properties }; + const required = Object.entries(properties) + .filter(([filedName, field]) => field.required) + .map(([fieldName]) => fieldName); + + return { properties, required }; }; module.exports = { diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js 
b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index 60d40e3..d66170d 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -5,7 +5,7 @@ const setDependencies = app => { }; const columnPropertiesMapper = { - columns_default: 'default', + column_default: 'default', is_nullable: { keyword: 'required', values: { From 4fbda3d54bd2075497cf013d133897ab9b955b02 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 19 Oct 2021 11:23:38 +0300 Subject: [PATCH 58/69] Added support of domain types including FE and RE --- forward_engineering/configs/templates.js | 1 + forward_engineering/configs/types.js | 11 ++- forward_engineering/ddlProvider.js | 2 + forward_engineering/helpers/udtHelper.js | 27 +++++++ .../field_level/fieldLevelConfig.json | 64 ++++++++++++++++ .../helpers/postgresHelpers/columnHelper.js | 5 ++ .../postgresHelpers/userDefinedTypesHelper.js | 75 ++++++++++++++++++- .../helpers/postgresService.js | 14 +++- reverse_engineering/helpers/queryConstants.js | 10 +++ types/domain.json | 11 +++ types/object.json | 3 +- 11 files changed, 216 insertions(+), 7 deletions(-) create mode 100644 types/domain.json diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index e7bbc4c..0d9221d 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -44,4 +44,5 @@ module.exports = { createCompositeType: 'CREATE TYPE ${name} AS (\n\t${columnDefinitions}\n);\n${comment}\n', createEnumType: 'CREATE TYPE ${name} AS ENUM (${values});\n${comment}\n', createRangeType: 'CREATE TYPE ${name} AS RANGE (\n\tSUBTYPE=${subtype}${options}\n);\n${comment}\n', + createDomainType: 'CREATE DOMAIN ${name} AS ${underlyingType}${notNull}${collate}${default}${constraints};\n${comment}\n' }; diff --git a/forward_engineering/configs/types.js b/forward_engineering/configs/types.js index 25a62da..26443f1 100644 
--- a/forward_engineering/configs/types.js +++ b/forward_engineering/configs/types.js @@ -16,11 +16,11 @@ module.exports = { size: 1, mode: 'varying', }, - "tsvector": { - mode: 'text' + tsvector: { + mode: 'text', }, - "tsquery": { - mode: 'text' + tsquery: { + mode: 'text', }, smallint: { capacity: 2, @@ -197,4 +197,7 @@ module.exports = { range_udt: { mode: 'range', }, + domain: { + mode: 'domain', + }, }; diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 2236046..21e7050 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -430,6 +430,8 @@ module.exports = (baseProvider, options, app) => { subtypeDiffFunction: jsonSchema.subtypeDiffFunction, multiRangeType: jsonSchema.multiRangeType, databaseName: dbData.databaseName, + underlyingType: jsonSchema.underlyingType, + checkConstraints: jsonSchema.checkConstraints, collationRule, timePrecision, with_timezone, diff --git a/forward_engineering/helpers/udtHelper.js b/forward_engineering/helpers/udtHelper.js index dfa79bf..fc90690 100644 --- a/forward_engineering/helpers/udtHelper.js +++ b/forward_engineering/helpers/udtHelper.js @@ -34,6 +34,23 @@ module.exports = ({ options: getRangeOptions(udt), comment: udt.comment ? comment : '', }); + case 'domain': { + const comment = assignTemplates(templates.comment, { + object: 'DOMAIN', + objectName: udtName, + comment: wrapComment(udt.comment), + }); + + return assignTemplates(templates.createDomainType, { + name: udtName, + underlyingType: udt.underlyingType, + notNull: !udt.nullable ? ' NOT NULL' : '', + collate: udt.collation ? `\n\tCOLLATE ${udt.collation}` : '', + default: udt.default ? `\n\tDEFAULT ${udt.default}` : '', + constraints: getDomainConstraints(udt), + comment: udt.comment ? comment : '', + }); + } default: return ''; } @@ -56,6 +73,16 @@ module.exports = ({ return _.trim(statements) ? 
',\n\t' + _.trim(statements) : ''; }; + const getDomainConstraints = udt => { + return _.map(udt.checkConstraints, constraint => { + if (constraint.name) { + return `\n\tCONSTRAINT ${constraint.name} CHECK (${constraint.expression})`; + } + + return `\n\tCHECK (${constraint.expression})`; + }).join(''); + }; + const getBasicValue = prefix => value => { if (value) { return `${prefix}=${value}`; diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 67528ae..38a7130 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -3710,6 +3710,70 @@ making sure that you maintain a proper JSON format. "template": "textarea" } ], + "domain": [ + "name", + "code", + "schemaId", + "type", + { + "propertyName": "Underlying type", + "propertyKeyword": "underlyingType", + "propertyTooltip": "The underlying data type of the domain.", + "propertyType": "text" + }, + { + "propertyName": "Collation", + "propertyKeyword": "collation", + "propertyTooltip": "An optional collation for the domain. If no collation is specified, the underlying data type's default collation is used. The underlying type must be collatable if COLLATE is specified.", + "propertyType": "text" + }, + { + "propertyName": "Not null", + "propertyKeyword": "required", + "enableForReference": true, + "propertyType": "checkbox" + }, + "default", + { + "propertyName": "Check constraints", + "propertyKeyword": "checkConstraints", + "propertyType": "group", + "propertyTooltip": "CHECK clauses specify integrity constraints or tests which values of the domain must satisfy. Each constraint must be an expression producing a Boolean result. 
", + "structure": [ + { + "propertyName": "Constraint name", + "propertyKeyword": "name", + "propertyType": "text" + }, + { + "propertyName": "Check expression", + "propertyKeyword": "expression", + "propertyType": "details", + "template": "textarea", + "markdown": false + } + ] + }, + { + "propertyName": "Comments", + "propertyKeyword": "description", + "propertyTooltip": "comments", + "addTimestampButton": false, + "propertyType": "details", + "template": "textarea" + }, + "enum", + "sample", + { + "propertyName": "Remarks", + "propertyKeyword": "comments", + "shouldValidate": false, + "propertyTooltip": "remarks", + "addTimestampButton": true, + "propertyType": "details", + "template": "textarea" + } + ], "___2": [] } } \ No newline at end of file diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index d66170d..d0f38e8 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -25,6 +25,7 @@ const columnPropertiesMapper = { udt_name: 'udt_name', character_maximum_length: 'length', description: 'description', + domain_name: 'domain_name' }; const mapColumnData = userDefinedTypes => column => { @@ -54,6 +55,10 @@ const getType = (userDefinedTypes, column) => { return mapType(userDefinedTypes, column.udt_name); } + if (column.domain_name) { + return mapType(userDefinedTypes, column.domain_name); + } + return mapType(userDefinedTypes, column.type); }; diff --git a/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js index 1cf853b..b1e13f6 100644 --- a/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/userDefinedTypesHelper.js @@ -6,7 +6,7 @@ const setDependencies = app => { _ = app.require('lodash'); }; -const getUserDefinedTypes = udtResponse 
=> { +const getUserDefinedTypes = (udtResponse, domainTypes) => { return _.chain(udtResponse) .map(typeData => { switch (typeData.type) { @@ -21,6 +21,7 @@ const getUserDefinedTypes = udtResponse => { } }) .compact() + .concat(_.map(domainTypes, mapDomainType)) .value(); }; @@ -56,6 +57,78 @@ const getCompositeType = typeData => { const isTypeComposite = typeData => typeData.type === 'c'; +const mapDomainType = domain => { + return { + name: domain.domain_name, + type: 'domain', + underlyingType: _.flow( + setLength(domain), + setPrecisionAndScale(domain), + setIntervalType(domain), + setIntervalPrecision(domain) + )(getUnderlyingType(domain)), + collation: domain.collation_name || '', + default: domain.domain_default || '', + required: _.first(domain.constraints)?.not_null, + checkConstraints: _.map(domain.constraints, mapDomainConstraint), + }; +}; + +const getUnderlyingType = domain => { + if (domain.data_type === 'USER-DEFINED') { + return domain.udt_name; + } + + return domain.data_type; +}; + +const setLength = domain => type => { + if (domain.character_maximum_length) { + return `${type}(${domain.character_maximum_length})`; + } + + return type; +}; + +const setPrecisionAndScale = domain => type => { + if (type !== 'numeric') { + return type; + } + + if (_.isNumber(domain.numeric_precision) && _.isNumber(domain.numeric_scale)) { + return `${type}(${domain.numeric_precision},${domain.numeric_scale})`; + } + + if (_.isNumber(domain.numeric_precision)) { + return `${type}(${domain.numeric_precision})`; + } + + return type; +}; + +const setIntervalType = domain => type => { + if (domain.interval_type) { + return `${type} ${domain.interval_type}`; + } + + return type; +}; + +const setIntervalPrecision = domain => type => { + if (_.isNumber(domain.interval_precision)) { + return `${type}(${domain.interval_precision})`; + } + + return type; +}; + +const mapDomainConstraint = constraint => { + return { + name: constraint.constraint_name, + expression: 
constraint.expression, + }; +}; + module.exports = { setDependencies, getUserDefinedTypes, diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 42b8fdb..e5695b9 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -189,6 +189,8 @@ module.exports = { logger.progress('Get User-Defined Types', schemaName); const userDefinedTypes = await db.queryTolerant(queryConstants.GET_USER_DEFINED_TYPES, [schemaName]); + const domainTypes = await db.queryTolerant(queryConstants.GET_DOMAIN_TYPES, [schemaName]); + const udtsWithColumns = await mapPromises(userDefinedTypes, async typeData => { if (isTypeComposite(typeData)) { return { @@ -200,7 +202,17 @@ module.exports = { return typeData; }); - return getUserDefinedTypes(udtsWithColumns); + const domainTypesWithConstraints = await mapPromises(domainTypes, async typeData => { + return { + ...typeData, + constraints: await db.queryTolerant(queryConstants.GET_DOMAIN_TYPES_CONSTRAINTS, [ + typeData.domain_name, + schemaName, + ]), + }; + }); + + return getUserDefinedTypes(udtsWithColumns, domainTypesWithConstraints); }, async _retrieveSingleTableData(recordSamplingSettings, schemaOid, schemaName, userDefinedTypes, tableName) { diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index 65e0bae..cf71a02 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -237,6 +237,16 @@ const queryConstants = { GET_DB_NAME: 'SELECT current_database();', GET_DB_ENCODING: 'SHOW SERVER_ENCODING;', GET_DB_COLLATE_NAME: 'SELECT default_collate_name FROM information_schema.character_sets;', + GET_DOMAIN_TYPES: 'SELECT * FROM information_schema.domains WHERE domain_schema = $1', + GET_DOMAIN_TYPES_CONSTRAINTS: ` + SELECT pg_type.typname AS type_name, + pg_type.typnotnull AS not_null, + pg_constraint.conname AS 
constraint_name, + pg_catalog.pg_get_expr(pg_constraint.conbin, pg_constraint.conrelid) AS expression + FROM pg_catalog.pg_type AS pg_type + LEFT JOIN pg_catalog.pg_constraint AS pg_constraint ON (pg_constraint.contypid = pg_type.oid) + LEFT JOIN pg_catalog.pg_namespace AS pg_namespace ON (pg_namespace.oid = pg_type.typnamespace) + WHERE pg_type.typname = $1 AND pg_namespace.nspname = $2 AND pg_constraint.contype = 'c';`, }; const getQueryName = query => { diff --git a/types/domain.json b/types/domain.json new file mode 100644 index 0000000..234a5c1 --- /dev/null +++ b/types/domain.json @@ -0,0 +1,11 @@ +{ + "name": "domain", + "erdAbbreviation": "", + "dtdAbbreviation": "{dmn}", + "parentType": "string", + "hiddenOnEntity": [ + "collection", + "view" + ], + "defaultValues": {} +} \ No newline at end of file diff --git a/types/object.json b/types/object.json index 1db659c..b61fda9 100644 --- a/types/object.json +++ b/types/object.json @@ -27,7 +27,8 @@ "multirange", "enum", "composite", - "range_udt" + "range_udt", + "domain" ] } } From cd8c67d7ec6657a6e986ab2dacfe98750810e17a Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 19 Oct 2021 11:59:47 +0300 Subject: [PATCH 59/69] Fixed RE of timestamp precision, fixed issue with not handled --- forward_engineering/ddlProvider.js | 4 +-- .../helpers/columnDefinitionHelper.js | 10 +++--- .../field_level/fieldLevelConfig.json | 11 +++++-- .../helpers/postgresHelpers/columnHelper.js | 31 +++++++++++++++---- reverse_engineering/helpers/queryConstants.js | 3 +- 5 files changed, 42 insertions(+), 17 deletions(-) diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index 21e7050..b5a95c7 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -404,7 +404,7 @@ module.exports = (baseProvider, options, app) => { : ''; const timeTypes = ['time', 'timestamp']; const timePrecision = _.includes(timeTypes, columnDefinition.type) ? 
jsonSchema.timePrecision : ''; - const with_timezone = _.includes(timeTypes, columnDefinition.type) ? jsonSchema.with_timezone : ''; + const timezone = _.includes(timeTypes, columnDefinition.type) ? jsonSchema.timezone : ''; const intervalOptions = columnDefinition.type === 'interval' ? jsonSchema.intervalOptions : ''; const dbVersion = dbData.dbVersion; @@ -434,7 +434,7 @@ module.exports = (baseProvider, options, app) => { checkConstraints: jsonSchema.checkConstraints, collationRule, timePrecision, - with_timezone, + timezone, intervalOptions, dbVersion, }; diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index 9116103..3df43e1 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -27,9 +27,9 @@ module.exports = ({ return type; }; - const addWithTimezone = (type, with_timezone) => { - if (with_timezone) { - return `${type} WITH TIME ZONE`; + const addWithTimezone = (type, timezone) => { + if (timezone) { + return `${type} ${timezone}`; } return type; @@ -49,9 +49,9 @@ module.exports = ({ return addPrecision(type, columnDefinition.precision); } else if ( canHaveTimePrecision(type) && - (_.isNumber(columnDefinition.timePrecision) || columnDefinition.with_timezone) + (_.isNumber(columnDefinition.timePrecision) || columnDefinition.timezone) ) { - return addWithTimezone(addPrecision(type, columnDefinition.timePrecision), columnDefinition.with_timezone); + return addWithTimezone(addPrecision(type, columnDefinition.timePrecision), columnDefinition.timezone); } return type; diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 38a7130..e8cab44 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -1216,9 +1216,14 @@ making sure that you maintain a proper JSON format. 
} }, { - "propertyName": "With timezone", - "propertyKeyword": "with_timezone", - "propertyType": "checkbox", + "propertyName": "Timezone", + "propertyKeyword": "timezone", + "propertyType": "select", + "options": [ + "", + "WITH TIME ZONE", + "WITHOUT TIME ZONE" + ], "dependency": { "key": "mode", "value": [ diff --git a/reverse_engineering/helpers/postgresHelpers/columnHelper.js b/reverse_engineering/helpers/postgresHelpers/columnHelper.js index d0f38e8..e3a1135 100644 --- a/reverse_engineering/helpers/postgresHelpers/columnHelper.js +++ b/reverse_engineering/helpers/postgresHelpers/columnHelper.js @@ -18,6 +18,10 @@ const columnPropertiesMapper = { numeric_precision: 'precision', numeric_scale: 'scale', datetime_precision: 'timePrecision', + attribute_mode: { + keyword: 'timePrecision', + check: (column, value) => value !== -1 && canHaveTimePrecision(column.data_type), + }, interval_type: 'intervalOptions', collation_name: 'collationRule', column_name: 'name', @@ -25,7 +29,15 @@ const columnPropertiesMapper = { udt_name: 'udt_name', character_maximum_length: 'length', description: 'description', - domain_name: 'domain_name' + domain_name: 'domain_name', +}; + +const getColumnValue = (column, key, value) => { + if (columnPropertiesMapper[key]?.check) { + return columnPropertiesMapper[key].check(column, value) ? 
value : ''; + } + + return _.get(columnPropertiesMapper, `${key}.values.${value}`, value); }; const mapColumnData = userDefinedTypes => column => { @@ -33,7 +45,7 @@ const mapColumnData = userDefinedTypes => column => { .toPairs() .map(([key, value]) => [ columnPropertiesMapper[key]?.keyword || columnPropertiesMapper[key], - _.get(columnPropertiesMapper, `${key}.values.${value}`, value), + getColumnValue(column, key, value), ]) .filter(([key, value]) => key && !_.isNil(value)) .fromPairs() @@ -130,14 +142,14 @@ const mapType = (userDefinedTypes, type) => { return { type: 'datetime', mode: type }; case 'timestamptz': case 'timestamp with time zone': - return { type: 'datetime', mode: 'timestamp', with_timezone: true }; + return { type: 'datetime', mode: 'timestamp', timezone: 'WITH TIME ZONE' }; case 'timestamp without time zone': - return { type: 'datetime', mode: 'timestamp' }; + return { type: 'datetime', mode: 'timestamp', timezone: 'WITHOUT TIME ZONE' }; case 'timetz': case 'time with time zone': - return { type: 'datetime', mode: 'time', with_timezone: true }; + return { type: 'datetime', mode: 'time', timezone: 'WITH TIME ZONE' }; case 'time without time zone': - return { type: 'datetime', mode: 'time' }; + return { type: 'datetime', mode: 'time', timezone: 'WITHOUT TIME ZONE' }; case 'json': case 'jsonb': return { type: 'json', mode: type, subtype: 'object' }; @@ -226,6 +238,13 @@ const getParsedJsonValueType = value => { return type; }; +const canHaveTimePrecision = columnDataType => { + return _.includes( + ['timestamp with time zone', 'timestamp without time zone', 'time with time zone', 'time without time zone'], + columnDataType + ); +}; + module.exports = { setDependencies, mapColumnData, diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index cf71a02..d7701d2 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -35,7 +35,8 @@ const 
queryConstants = { GET_TABLE_COLUMNS_ADDITIONAL_DATA: ` SELECT pg_attribute.attname AS name, pg_attribute.attndims AS number_of_array_dimensions, - pg_description.description + pg_description.description, + pg_attribute.atttypmod AS attribute_mode FROM pg_catalog.pg_attribute AS pg_attribute LEFT JOIN pg_catalog.pg_description AS pg_description ON (pg_description.objsubid=pg_attribute.attnum AND pg_description.objoid = pg_attribute.attrelid) From 95ca93bd93809905f9bd5650f6d7968d8578ec92 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 19 Oct 2021 14:17:52 +0300 Subject: [PATCH 60/69] RE: added support to select database after connection to PostgreSQL server --- reverse_engineering/api.js | 28 +++++ reverse_engineering/config.json | 1 + .../connectionSettingsModalConfig.json | 102 ++++++++++++------ .../helpers/connectionHelper.js | 2 +- reverse_engineering/helpers/db.js | 4 + .../helpers/postgresService.js | 8 ++ reverse_engineering/helpers/queryConstants.js | 2 + 7 files changed, 112 insertions(+), 35 deletions(-) diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index df690bc..a1ab025 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -1,6 +1,7 @@ 'use strict'; const { createLogger } = require('./helpers/loggerHelper'); +const { connect } = require('./helpers/postgresService'); const postgresService = require('./helpers/postgresService'); module.exports = { @@ -32,6 +33,33 @@ module.exports = { } }, + async getDatabases(connectionInfo, logger, cb, app) { + try { + logger.clear(); + logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); + + const postgresLogger = createLogger({ + title: 'Get DB collections names', + hiddenKeys: connectionInfo.hiddenKeys, + logger, + }); + + postgresService.setDependencies(app); + await postgresService.connect(connectionInfo, postgresLogger); + + const dbs = await postgresService.getDatabaseNames(); + logger.log('info', dbs, 'All databases list', 
connectionInfo.hiddenKeys); + return cb(null, dbs); + } catch (err) { + logger.log('error', err); + return cb(mapError(err)); + } + }, + + getDocumentKinds: function (connectionInfo, logger, cb) { + cb(null, []); + }, + async getDbCollectionsNames(connectionInfo, logger, callback, app) { try { logger.clear(); diff --git a/reverse_engineering/config.json b/reverse_engineering/config.json index bfb9eee..278339e 100644 --- a/reverse_engineering/config.json +++ b/reverse_engineering/config.json @@ -4,6 +4,7 @@ "WRONG_CONNECTION": "Can not connect to PostgreSQL Server instance" }, "defaultDdlType": "postgres", + "scenario": "getDatabases", "excludeDocKind": [ "id" ], diff --git a/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json b/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json index 17728d0..a67452e 100644 --- a/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json +++ b/reverse_engineering/connection_settings_modal/connectionSettingsModalConfig.json @@ -24,12 +24,13 @@ "defaultValue": 5432 }, { - "inputLabel": "Database name", - "inputKeyword": "databaseName", - "description": "Database name (optional)", + "inputLabel": "Maintenance database", + "inputKeyword": "maintenanceDatabase", + "description": "Maintenance database name (default: postgres)", "inputType": "text", - "inputPlaceholder": "Database name", - "regex": "([^\\s])" + "inputPlaceholder": "Maintenance database", + "regex": "([^\\s])", + "default": "postgres" } ] }, @@ -64,30 +65,41 @@ "inputLabel": "SSL", "inputKeyword": "sslType", "inputType": "select", - "options": [{ - "value": "Off", - "label": "Off" - }, { - "value": "TRUST_ALL_CERTIFICATES", - "label": "Unvalidated" - },{ - "value": "TRUST_CUSTOM_CA_SIGNED_CERTIFICATES", - "label": "Server validation" - - },{ - "value": "TRUST_SERVER_CLIENT_CERTIFICATES", - "label": "Server and client validation" - }] + "options": [ + { + "value": "Off", + "label": "Off" + }, + 
{ + "value": "TRUST_ALL_CERTIFICATES", + "label": "Unvalidated" + }, + { + "value": "TRUST_CUSTOM_CA_SIGNED_CERTIFICATES", + "label": "Server validation" + }, + { + "value": "TRUST_SERVER_CLIENT_CERTIFICATES", + "label": "Server and client validation" + } + ] }, { "inputLabel": "Certificate Authority", "inputKeyword": "certAuthority", "inputType": "file", "inputPlaceholder": "Certificate Authority", - "extensions": ["pem", "crt", "key"], + "extensions": [ + "pem", + "crt", + "key" + ], "dependency": { "key": "sslType", - "value": ["TRUST_CUSTOM_CA_SIGNED_CERTIFICATES", "TRUST_SERVER_CLIENT_CERTIFICATES"] + "value": [ + "TRUST_CUSTOM_CA_SIGNED_CERTIFICATES", + "TRUST_SERVER_CLIENT_CERTIFICATES" + ] } }, { @@ -95,7 +107,11 @@ "inputKeyword": "clientCert", "inputType": "file", "inputPlaceholder": "Client Certificate", - "extensions": ["pem", "crt", "key"], + "extensions": [ + "pem", + "crt", + "key" + ], "dependency": { "key": "sslType", "value": "TRUST_SERVER_CLIENT_CERTIFICATES" @@ -106,7 +122,11 @@ "inputKeyword": "clientPrivateKey", "inputType": "file", "inputPlaceholder": "Client Private Key", - "extensions": ["pem", "crt", "key"], + "extensions": [ + "pem", + "crt", + "key" + ], "dependency": { "key": "sslType", "value": "TRUST_SERVER_CLIENT_CERTIFICATES" @@ -158,13 +178,16 @@ "inputLabel": "SSH Auth Method", "inputKeyword": "ssh_method", "inputType": "select", - "options": [{ - "value": "privateKey", - "label": "Private Key" - }, { - "value": "password", - "label": "Password" - }], + "options": [ + { + "value": "privateKey", + "label": "Private Key" + }, + { + "value": "password", + "label": "Password" + } + ], "disable": { "key": "ssh", "value": false @@ -176,10 +199,15 @@ "inputKeyword": "ssh_key_file", "inputType": "file", "inputPlaceholder": "Private Key", - "extensions": ["*"], + "extensions": [ + "*" + ], "dependency": { "key": "ssh_method", - "value": ["privateKey", ""] + "value": [ + "privateKey", + "" + ] }, "disable": { "key": "ssh", @@ -194,7 +222,10 
@@ "isHiddenKey": true, "dependency": { "key": "ssh_method", - "value": ["privateKey", ""] + "value": [ + "privateKey", + "" + ] }, "disable": { "key": "ssh", @@ -209,7 +240,10 @@ "isHiddenKey": true, "dependency": { "key": "ssh_method", - "value": ["password", ""] + "value": [ + "password", + "" + ] }, "disable": { "key": "ssh", diff --git a/reverse_engineering/helpers/connectionHelper.js b/reverse_engineering/helpers/connectionHelper.js index e3d6deb..17dbb8d 100644 --- a/reverse_engineering/helpers/connectionHelper.js +++ b/reverse_engineering/helpers/connectionHelper.js @@ -87,7 +87,7 @@ const createConnectionPool = async connectionInfo => { connectionTimeoutMillis: Number(connectionInfo.queryRequestTimeout) || 60000, query_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, statement_timeout: Number(connectionInfo.queryRequestTimeout) || 60000, - database: connectionInfo.databaseName, + database: connectionInfo.database || connectionInfo.maintenanceDatabase, }; const pool = await new Pool(config); diff --git a/reverse_engineering/helpers/db.js b/reverse_engineering/helpers/db.js index d2f4744..21a264c 100644 --- a/reverse_engineering/helpers/db.js +++ b/reverse_engineering/helpers/db.js @@ -11,6 +11,10 @@ module.exports = { pool.on('error', error => newLogger.error(error)); }, + isPoolInitialized() { + return Boolean(pool); + }, + async releasePool() { if (pool) { await pool.end(); diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index e5695b9..6ee5f48 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -62,6 +62,10 @@ module.exports = { }, async connect(connectionInfo, specificLogger) { + if (db.isPoolInitialized()) { + await this.disconnect(); + } + const { pool, sshTunnel } = await createConnectionPool(connectionInfo); db.initializePool(pool, specificLogger); @@ -88,6 +92,10 @@ module.exports = { return db.query(script); }, + 
async getDatabaseNames() { + return _.map(await db.query(queryConstants.GET_DATABASES), 'database_name'); + }, + async logVersion() { const versionRow = await db.queryTolerant(queryConstants.GET_VERSION, [], true); const version = versionRow?.version || 'Version not retrieved'; diff --git a/reverse_engineering/helpers/queryConstants.js b/reverse_engineering/helpers/queryConstants.js index d7701d2..93d69e8 100644 --- a/reverse_engineering/helpers/queryConstants.js +++ b/reverse_engineering/helpers/queryConstants.js @@ -248,6 +248,8 @@ const queryConstants = { LEFT JOIN pg_catalog.pg_constraint AS pg_constraint ON (pg_constraint.contypid = pg_type.oid) LEFT JOIN pg_catalog.pg_namespace AS pg_namespace ON (pg_namespace.oid = pg_type.typnamespace) WHERE pg_type.typname = $1 AND pg_namespace.nspname = $2 AND pg_constraint.contype = 'c';`, + GET_DATABASES: + 'SELECT datname AS database_name FROM pg_catalog.pg_database WHERE datistemplate != TRUE AND datallowconn = TRUE;', }; const getQueryName = query => { From 91fd64a28525f43331318915b4fa677efa4bcbd9 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 19 Oct 2021 18:28:12 +0300 Subject: [PATCH 61/69] Apply to instance: added scenario to show database selector before apply to instance --- forward_engineering/api.js | 5 +++++ forward_engineering/config.json | 3 +++ 2 files changed, 8 insertions(+) diff --git a/forward_engineering/api.js b/forward_engineering/api.js index 630a9e8..e1d867d 100644 --- a/forward_engineering/api.js +++ b/forward_engineering/api.js @@ -3,6 +3,11 @@ const { createLogger } = require('../reverse_engineering/helpers/loggerHelper'); const applyToInstanceHelper = require('./applyToInstanceHelper'); module.exports = { + getDatabases(connectionInfo, logger, callback, app) { + logger.progress({ message: 'Find all databases' }); + + reApi.getDatabases(connectionInfo, logger, callback, app); + }, applyToInstance(connectionInfo, logger, callback, app) { logger.clear(); logger.log( diff --git 
a/forward_engineering/config.json b/forward_engineering/config.json index 357ecf8..ccbba7f 100644 --- a/forward_engineering/config.json +++ b/forward_engineering/config.json @@ -8,6 +8,9 @@ "view": true }, "applyScriptToInstance": true, + "applyToInstanceSettings": { + "scenario": "getDatabases" + }, "combinedContainers": true, "feLevelSelector": { "container": true, From f491da0c04c29b1cad267bca023f7fb238561f13 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Tue, 19 Oct 2021 18:28:39 +0300 Subject: [PATCH 62/69] RE: fixed log message --- reverse_engineering/api.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index a1ab025..887073f 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -39,7 +39,7 @@ module.exports = { logger.log('info', connectionInfo, 'connectionInfo', connectionInfo.hiddenKeys); const postgresLogger = createLogger({ - title: 'Get DB collections names', + title: 'Get DB names', hiddenKeys: connectionInfo.hiddenKeys, logger, }); From 151e0155e5c0b29e6717d3011de37ec1542f0ef6 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 20 Oct 2021 15:11:50 +0300 Subject: [PATCH 63/69] Fixed configs accroding to documentation --- .../field_level/fieldLevelConfig.json | 786 +++++++----------- .../model_level/modelLevelConfig.json | 39 +- 2 files changed, 352 insertions(+), 473 deletions(-) diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index e8cab44..ac9a518 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -253,7 +253,7 @@ making sure that you maintain a proper JSON format. 
}, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -296,35 +296,26 @@ making sure that you maintain a proper JSON format. { "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -431,31 +422,23 @@ making sure that you maintain a proper JSON format. 
"propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -619,7 +602,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -662,35 +645,26 @@ making sure that you maintain a proper JSON format. 
{ "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -797,31 +771,23 @@ making sure that you maintain a proper JSON format. "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
+ }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -953,7 +919,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -996,35 +962,26 @@ making sure that you maintain a proper JSON format. { "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -1131,31 +1088,23 @@ making sure that you maintain a proper JSON format. 
"propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -1211,7 +1160,8 @@ making sure that you maintain a proper JSON format. "key": "mode", "value": [ "time", - "timestamp" + "timestamp", + "interval" ] } }, @@ -1339,7 +1289,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -1382,35 +1332,26 @@ making sure that you maintain a proper JSON format. 
{ "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -1517,31 +1458,23 @@ making sure that you maintain a proper JSON format. "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
+ }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -1715,7 +1648,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -1758,35 +1691,26 @@ making sure that you maintain a proper JSON format. { "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -1893,31 +1817,23 @@ making sure that you maintain a proper JSON format. 
"propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -2053,7 +1969,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -2096,35 +2012,26 @@ making sure that you maintain a proper JSON format. 
{ "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -2231,31 +2138,23 @@ making sure that you maintain a proper JSON format. "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
+ }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -2455,7 +2354,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -2498,35 +2397,26 @@ making sure that you maintain a proper JSON format. { "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -2633,31 +2523,23 @@ making sure that you maintain a proper JSON format. 
"propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -2773,7 +2655,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -2816,35 +2698,26 @@ making sure that you maintain a proper JSON format. 
{ "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -2951,31 +2824,23 @@ making sure that you maintain a proper JSON format. "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
+ }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -3112,7 +2977,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "Primary key options", - "propertyType": "block", + "propertyType": "group", "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -3155,35 +3020,26 @@ making sure that you maintain a proper JSON format. { "propertyName": "Constraint name", "propertyKeyword": "constraintName", - "propertyTooltip": "", "propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", @@ -3290,31 +3146,23 @@ making sure that you maintain a proper JSON format. 
"propertyType": "text" }, { - "propertyName": "Category", - "propertyKeyword": "indexCategory", - "propertyTooltip": "", - "propertyType": "select", - "defaultValue": "", - "options": [ - "", - "btree", - "hash", - "gist", - "spgist", - "gin", - "brin" - ] - }, - { - "propertyName": "Key order", - "propertyKeyword": "indexOrder", - "propertyTooltip": "", - "propertyType": "select", - "options": [ - "", - "ASC", - "DESC" - ] + "propertyName": "Include non-key columns", + "propertyKeyword": "indexInclude", + "propertyType": "fieldList", + "template": "orderedList", + "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." + }, + { + "propertyName": "With options", + "propertyKeyword": "indexWithOptions", + "propertyType": "details", + "template": "textarea", + "valueType": "string" + }, + { + "propertyName": "Tablespace", + "propertyKeyword": "indexTablespace", + "propertyType": "text" }, { "propertyName": "Comment", diff --git a/properties_pane/model_level/modelLevelConfig.json b/properties_pane/model_level/modelLevelConfig.json index e902a23..0e7147c 100644 --- a/properties_pane/model_level/modelLevelConfig.json +++ b/properties_pane/model_level/modelLevelConfig.json @@ -51,7 +51,6 @@ making sure that you maintain a proper JSON format. } */ - [ { "lowerTab": "Details", @@ -152,17 +151,49 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "Enter a database template name if applicable", "propertyType": "text" }, + { + "propertyName": "Locale", + "propertyKeyword": "locale", + "propertyTooltip": "Collation order (LC_COLLATE) to use in the new database. ", + "propertyType": "text" + }, { "propertyName": "Collation", "propertyKeyword": "LC_COLLATE", "propertyTooltip": "Collation order (LC_COLLATE) to use in the new database. 
", - "propertyType": "text" + "propertyType": "text", + "dependency": { + "type": "or", + "values": [ + { + "key": "locale", + "exists": false + }, + { + "key": "locale", + "value": "" + } + ] + } }, { "propertyName": "Character type", "propertyKeyword": "LC_CTYPE", "propertyTooltip": "Character classification (LC_CTYPE) to use in the new database. ", - "propertyType": "text" + "propertyType": "text", + "dependency": { + "type": "or", + "values": [ + { + "key": "locale", + "exists": false + }, + { + "key": "locale", + "value": "" + } + ] + } }, { "propertyName": "Comments", @@ -175,4 +206,4 @@ making sure that you maintain a proper JSON format. } ] } -] +] \ No newline at end of file From 139316feef7e6a0cc99a880c859801dbe77706e5 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 20 Oct 2021 15:28:01 +0300 Subject: [PATCH 64/69] FE: fixed Fe of constrants defined on field level --- .../helpers/constraintsHelper.js | 4 +- forward_engineering/helpers/keyHelper.js | 16 +++---- .../field_level/fieldLevelConfig.json | 45 +++++++++++-------- 3 files changed, 37 insertions(+), 28 deletions(-) diff --git a/forward_engineering/helpers/constraintsHelper.js b/forward_engineering/helpers/constraintsHelper.js index 0ceb134..26df675 100644 --- a/forward_engineering/helpers/constraintsHelper.js +++ b/forward_engineering/helpers/constraintsHelper.js @@ -50,8 +50,8 @@ module.exports = ({ const constraintName = wrapInQuotes(_.trim(keyData.name)); const isAllColumnsDeactivated = checkAllKeysDeactivated(keyData.columns); const columns = getColumnsList(keyData.columns, isAllColumnsDeactivated, isParentActivated); - const includeNonKey = keyData.includeNonKey - ? ` INCLUDE (${getColumnsList(keyData.include, isAllColumnsDeactivated, isParentActivated)})` + const includeNonKey = keyData.include + ? ` INCLUDE${getColumnsList(keyData.include, isAllColumnsDeactivated, isParentActivated)}` : ''; const storageParameters = keyData.storageParameters ? 
` WITH (${keyData.storageParameters})` : ''; const tablespace = keyData.tablespace ? ` USING INDEX TABLESPACE ${wrapInQuotes(keyData.tablespace)}` : ''; diff --git a/forward_engineering/helpers/keyHelper.js b/forward_engineering/helpers/keyHelper.js index 26bf852..fceb193 100644 --- a/forward_engineering/helpers/keyHelper.js +++ b/forward_engineering/helpers/keyHelper.js @@ -33,7 +33,7 @@ module.exports = (_, clean) => { return isPrimaryKey(column) && _.isEmpty(column.primaryKeyOptions); }; - const hydrateUniqueOptions = (options, columnName, isActivated) => + const hydrateUniqueOptions = (options, columnName, isActivated, jsonSchema) => clean({ keyType: 'UNIQUE', name: options['constraintName'], @@ -43,13 +43,13 @@ module.exports = (_, clean) => { isActivated: isActivated, }, ], - include: options['include'], + include: getKeys(options['include'] || options['indexInclude'] || [], jsonSchema), storageParameters: options['indexStorageParameters'], comment: options['indexComment'], tablespace: options['indexTablespace'], }); - const hydratePrimaryKeyOptions = (options, columnName, isActivated) => + const hydratePrimaryKeyOptions = (options, columnName, isActivated, jsonSchema) => clean({ keyType: 'PRIMARY KEY', name: options['constraintName'], @@ -59,7 +59,7 @@ module.exports = (_, clean) => { isActivated: isActivated, }, ], - include: options['include'], + include: getKeys(options['include'] || options['indexInclude'] || [], jsonSchema), storageParameters: options['indexStorageParameters'], comment: options['indexComment'], tablespace: options['indexTablespace'], @@ -94,7 +94,7 @@ module.exports = (_, clean) => { return jsonSchema.primaryKey .filter(primaryKey => !_.isEmpty(primaryKey.compositePrimaryKey)) .map(primaryKey => ({ - ...hydratePrimaryKeyOptions(primaryKey), + ...hydratePrimaryKeyOptions(primaryKey, null, null, jsonSchema), columns: getKeys(primaryKey.compositePrimaryKey, jsonSchema), })); }; @@ -107,7 +107,7 @@ module.exports = (_, clean) => { return 
jsonSchema.uniqueKey .filter(uniqueKey => !_.isEmpty(uniqueKey.compositeUniqueKey)) .map(uniqueKey => ({ - ...hydrateUniqueOptions(uniqueKey), + ...hydrateUniqueOptions(uniqueKey, null, null, jsonSchema), columns: getKeys(uniqueKey.compositeUniqueKey, jsonSchema), })); }; @@ -124,7 +124,7 @@ module.exports = (_, clean) => { return; } - return hydratePrimaryKeyOptions(schema.primaryKeyOptions, name, schema.isActivated); + return hydratePrimaryKeyOptions(_.first(schema.primaryKeyOptions), name, schema.isActivated, jsonSchema); }).filter(Boolean); const uniqueKeyConstraints = _.flatten( @@ -136,7 +136,7 @@ module.exports = (_, clean) => { } return schema.uniqueKeyOptions.map(uniqueKey => - hydrateUniqueOptions(uniqueKey, name, schema.isActivated) + hydrateUniqueOptions(uniqueKey, name, schema.isActivated, jsonSchema) ); }) ).filter(Boolean); diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index ac9a518..01ea571 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -254,6 +254,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -307,7 +308,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -430,7 +431,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -603,6 +604,7 @@ making sure that you maintain a proper JSON format. 
{ "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -656,7 +658,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -779,7 +781,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -920,6 +922,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -973,7 +976,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -1096,7 +1099,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -1290,6 +1293,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -1343,7 +1347,7 @@ making sure that you maintain a proper JSON format. 
}, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -1466,7 +1470,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -1649,6 +1653,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -1702,7 +1707,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -1825,7 +1830,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -1970,6 +1975,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -2023,7 +2029,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -2146,7 +2152,7 @@ making sure that you maintain a proper JSON format. 
}, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -2355,6 +2361,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -2408,7 +2415,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -2531,7 +2538,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -2656,6 +2663,7 @@ making sure that you maintain a proper JSON format. { "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -2709,7 +2717,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -2832,7 +2840,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -2978,6 +2986,7 @@ making sure that you maintain a proper JSON format. 
{ "propertyName": "Primary key options", "propertyType": "group", + "groupItemLimit": 1, "propertyKeyword": "primaryKeyOptions", "enableForReference": true, "propertyTooltip": "Primary key options", @@ -3031,7 +3040,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" @@ -3154,7 +3163,7 @@ making sure that you maintain a proper JSON format. }, { "propertyName": "With options", - "propertyKeyword": "indexWithOptions", + "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", "valueType": "string" From 5dae493d8f830ceb2d264b86fc99cdecf9f60a60 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Thu, 21 Oct 2021 17:52:13 +0300 Subject: [PATCH 65/69] FE: added generation of database script, adjusted ddlProvider to work with schemaData --- forward_engineering/configs/templates.js | 12 +-- forward_engineering/ddlProvider.js | 87 ++++++++++++++----- forward_engineering/helpers/databaseHelper.js | 23 +++++ forward_engineering/helpers/udtHelper.js | 2 +- package.json | 2 +- .../field_level/fieldLevelConfig.json | 80 ++++++++++------- 6 files changed, 144 insertions(+), 62 deletions(-) create mode 100644 forward_engineering/helpers/databaseHelper.js diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index 0d9221d..6646bc5 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -1,10 +1,12 @@ module.exports = { + createDatabase: 'CREATE DATABASE ${name}${template}${encoding}${locale}${collate}${characterClassification}${tablespace};\n', + createSchema: 'CREATE SCHEMA${ifNotExist} ${name};\nSET search_path TO ${name};\n${comment}\n', comment: 'COMMENT ON ${object} ${objectName} IS ${comment};\n', createTable: - '\nCREATE${temporary} 
TABLE${ifNotExist} ${name} (\n' + + 'CREATE${temporary} TABLE${ifNotExist} ${name} (\n' + '${columnDefinitions}${keyConstraints}${checkConstraints}${foreignKeyConstraints}\n' + ')${options};\n${comment}${columnDescriptions}', @@ -41,8 +43,8 @@ module.exports = { '\tLANGUAGE ${language}\n' + 'AS $BODY$\n${body}\n$BODY$;\n', - createCompositeType: 'CREATE TYPE ${name} AS (\n\t${columnDefinitions}\n);\n${comment}\n', - createEnumType: 'CREATE TYPE ${name} AS ENUM (${values});\n${comment}\n', - createRangeType: 'CREATE TYPE ${name} AS RANGE (\n\tSUBTYPE=${subtype}${options}\n);\n${comment}\n', - createDomainType: 'CREATE DOMAIN ${name} AS ${underlyingType}${notNull}${collate}${default}${constraints};\n${comment}\n' + createCompositeType: 'CREATE TYPE ${name} AS (\n\t${columnDefinitions}\n);\n${comment}', + createEnumType: 'CREATE TYPE ${name} AS ENUM (${values});\n${comment}', + createRangeType: 'CREATE TYPE ${name} AS RANGE (\n\tSUBTYPE=${subtype}${options}\n);\n${comment}', + createDomainType: 'CREATE DOMAIN ${name} AS ${underlyingType}${notNull}${collate}${default}${constraints};\n${comment}' }; diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index b5a95c7..631ed28 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -86,22 +86,42 @@ module.exports = (baseProvider, options, app) => { wrapComment, }); + const { getLocaleProperties } = require('./helpers/databaseHelper')(); + return { - createDatabase({ databaseName, ifNotExist, comments, udfs, procedures }) { + createDatabase(modelData) { + if (!modelData.databaseName) { + return ''; + } + + const { locale, collate, characterClassification } = getLocaleProperties(modelData); + + return assignTemplates(templates.createDatabase, { + name: wrapInQuotes(modelData.databaseName), + template: modelData.template ? `\n\tTEMPLATE ${modelData.template}` : '', + encoding: modelData.encoding ? `\n\tENCODING ${modelData.encoding}` : '', + locale: locale ? 
`\n\tLOCALE '${modelData.locale}'` : '', + collate: collate ? `\n\tLC_COLLATE '${modelData.collate}'` : '', + characterClassification: characterClassification ? `\n\tLC_CTYPE '${characterClassification}'` : '', + tablespace: modelData.tablespace ? `\n\tTABLESPACE '${modelData.tablespace}'` : '', + }); + }, + + createSchema({ schemaName, ifNotExist, comments, udfs, procedures }) { const comment = assignTemplates(templates.comment, { object: 'SCHEMA', - objectName: wrapInQuotes(databaseName), + objectName: wrapInQuotes(schemaName), comment: wrapComment(comments), }); const schemaStatement = assignTemplates(templates.createSchema, { - name: wrapInQuotes(databaseName), + name: wrapInQuotes(schemaName), ifNotExist: ifNotExist ? ' IF NOT EXISTS' : '', comment: comments ? comment : '', }); - const createFunctionStatement = getFunctionsScript(databaseName, udfs); - const createProceduresStatement = getProceduresScript(databaseName, procedures); + const createFunctionStatement = getFunctionsScript(schemaName, udfs); + const createProceduresStatement = getProceduresScript(schemaName, procedures); return _.trim([schemaStatement, createFunctionStatement, createProceduresStatement].join('\n\n')); }, @@ -113,6 +133,7 @@ module.exports = (baseProvider, options, app) => { checkConstraints, foreignKeyConstraints, dbData, + schemaData, columnDefinitions, relatedSchemas, keyConstraints, @@ -131,7 +152,7 @@ module.exports = (baseProvider, options, app) => { isActivated ) { const ifNotExistStr = ifNotExist ? 
' IF NOT EXISTS' : ''; - const tableName = getNamePrefixedWithSchemaName(name, dbData.databaseName); + const tableName = getNamePrefixedWithSchemaName(name, schemaData.schemaName); const comment = assignTemplates(templates.comment, { object: 'TABLE', objectName: tableName, @@ -225,7 +246,7 @@ module.exports = (baseProvider, options, app) => { using, keys, options, - tableName: getNamePrefixedWithSchemaName(tableName, dbData.databaseName), + tableName: getNamePrefixedWithSchemaName(tableName, index.schemaName), }), { isActivated: index.isActivated, @@ -252,7 +273,8 @@ module.exports = (baseProvider, options, app) => { foreignSchemaName, primarySchemaName, }, - dbData + dbData, + schemaData ) { const isAllPrimaryKeysDeactivated = checkAllKeysDeactivated(primaryKey); const isAllForeignKeysDeactivated = checkAllKeysDeactivated(foreignKey); @@ -263,7 +285,7 @@ module.exports = (baseProvider, options, app) => { foreignTableActivated; const foreignKeyStatement = assignTemplates(templates.createForeignKeyConstraint, { - primaryTable: getNamePrefixedWithSchemaName(primaryTable, primarySchemaName || dbData.databaseName), + primaryTable: getNamePrefixedWithSchemaName(primaryTable, primarySchemaName || schemaData.schemaName), name: name ? `CONSTRAINT ${wrapInQuotes(name)}` : '', foreignKey: isActivated ? foreignKeysToString(foreignKey) : foreignActiveKeysToString(foreignKey), primaryKey: isActivated ? 
foreignKeysToString(primaryKey) : foreignActiveKeysToString(primaryKey), @@ -287,7 +309,8 @@ module.exports = (baseProvider, options, app) => { foreignSchemaName, primarySchemaName, }, - dbData + dbData, + schemaData ) { const isAllPrimaryKeysDeactivated = checkAllKeysDeactivated(primaryKey); const isAllForeignKeysDeactivated = checkAllKeysDeactivated(foreignKey); @@ -298,8 +321,8 @@ module.exports = (baseProvider, options, app) => { foreignTableActivated; const foreignKeyStatement = assignTemplates(templates.createForeignKey, { - primaryTable: getNamePrefixedWithSchemaName(primaryTable, primarySchemaName || dbData.databaseName), - foreignTable: getNamePrefixedWithSchemaName(foreignTable, foreignSchemaName || dbData.databaseName), + primaryTable: getNamePrefixedWithSchemaName(primaryTable, primarySchemaName || schemaData.schemaName), + foreignTable: getNamePrefixedWithSchemaName(foreignTable, foreignSchemaName || schemaData.schemaName), name: name ? wrapInQuotes(name) : '', foreignKey: isActivated ? foreignKeysToString(foreignKey) : foreignActiveKeysToString(foreignKey), primaryKey: isActivated ? 
foreignKeysToString(primaryKey) : foreignActiveKeysToString(primaryKey), @@ -312,7 +335,7 @@ module.exports = (baseProvider, options, app) => { }, createView(viewData, dbData, isActivated) { - const viewName = getNamePrefixedWithSchemaName(viewData.name, dbData.databaseName); + const viewName = getNamePrefixedWithSchemaName(viewData.name, viewData.schemaName); const comment = assignTemplates(templates.comment, { object: 'VIEW', @@ -322,7 +345,7 @@ module.exports = (baseProvider, options, app) => { const allDeactivated = checkAllKeysDeactivated(viewData.keys || []); const deactivatedWholeStatement = allDeactivated || !isActivated; - const { columns, tables } = getViewData(viewData.keys, dbData); + const { columns, tables } = getViewData(viewData.keys); let columnsAsString = columns.map(column => column.statement).join(',\n\t\t'); if (!deactivatedWholeStatement) { @@ -376,11 +399,11 @@ module.exports = (baseProvider, options, app) => { ); }, - createViewIndex(viewName, index, dbData, isParentActivated) { + createViewIndex() { return ''; }, - createUdt(udt, dbData) { + createUdt(udt) { const columns = _.map(udt.properties, this.convertColumnDefinition); return getUserDefinedType(udt, columns); @@ -398,7 +421,22 @@ module.exports = (baseProvider, options, app) => { return hasType(types, type); }, - hydrateColumn({ columnDefinition, jsonSchema, dbData }) { + hydrateDatabase({ modelData }) { + modelData = _.get(modelData, '0', {}); + + return { + databaseName: modelData.database_name, + tablespace: modelData.tablespace_name, + encoding: modelData.encoding, + template: modelData.template, + collate: modelData.LC_COLLATE, + characterClassification: modelData.LC_CTYPE, + dbVersion: modelData.dbVersion, + locale: modelData.locale, + }; + }, + + hydrateColumn({ columnDefinition, jsonSchema, schemaData }) { const collationRule = _.includes(['char', 'varchar', 'text'], columnDefinition.type) ? 
jsonSchema.collationRule : ''; @@ -406,7 +444,7 @@ module.exports = (baseProvider, options, app) => { const timePrecision = _.includes(timeTypes, columnDefinition.type) ? jsonSchema.timePrecision : ''; const timezone = _.includes(timeTypes, columnDefinition.type) ? jsonSchema.timezone : ''; const intervalOptions = columnDefinition.type === 'interval' ? jsonSchema.intervalOptions : ''; - const dbVersion = dbData.dbVersion; + const dbVersion = schemaData.dbVersion; return { name: columnDefinition.name, @@ -429,7 +467,7 @@ module.exports = (baseProvider, options, app) => { canonicalFunction: jsonSchema.canonicalFunction, subtypeDiffFunction: jsonSchema.subtypeDiffFunction, multiRangeType: jsonSchema.multiRangeType, - databaseName: dbData.databaseName, + schemaName: schemaData.schemaName, underlyingType: jsonSchema.underlyingType, checkConstraints: jsonSchema.checkConstraints, collationRule, @@ -440,8 +478,8 @@ module.exports = (baseProvider, options, app) => { }; }, - hydrateIndex(indexData, tableData) { - return indexData; + hydrateIndex(indexData, tableData, schemaData) { + return { ...indexData, schemaName: schemaData.schemaName }; }, hydrateViewIndex(indexData) { @@ -456,11 +494,11 @@ module.exports = (baseProvider, options, app) => { }; }, - hydrateDatabase(containerData, data) { + hydrateSchema(containerData, data) { const dbVersion = _.get(data, 'modelData.0.dbVersion'); return { - databaseName: containerData.name, + schemaName: containerData.name, ifNotExist: containerData.ifNotExist, comments: containerData.description, udfs: data?.udfs || [], @@ -510,7 +548,7 @@ module.exports = (baseProvider, options, app) => { }; }, - hydrateView({ viewData, entityData, relatedSchemas, relatedContainers }) { + hydrateView({ viewData, entityData }) { const detailsTab = entityData[0]; return { @@ -524,6 +562,7 @@ module.exports = (baseProvider, options, app) => { selectStatement: detailsTab.selectStatement, withCheckOption: detailsTab.withCheckOption, checkTestingScope: 
detailsTab.withCheckOption ? detailsTab.checkTestingScope : '', + schemaName: viewData.schemaData.schemaName, }; }, diff --git a/forward_engineering/helpers/databaseHelper.js b/forward_engineering/helpers/databaseHelper.js new file mode 100644 index 0000000..cff6fc9 --- /dev/null +++ b/forward_engineering/helpers/databaseHelper.js @@ -0,0 +1,23 @@ +module.exports = () => { + const getLocaleProperties = modelData => { + const isSupportsLocale = ['v13.x', 'v14.x'].includes(modelData.dbVersion); + + if (isSupportsLocale && modelData.locale) { + return { locale: modelData.locale }; + } else if (!isSupportsLocale && modelData.locale) { + return { + collate: modelData.locale, + characterClassification: modelData.locale, + }; + } else { + return { + collate: modelData.collate, + characterClassification: modelData.characterClassification, + }; + } + }; + + return { + getLocaleProperties, + }; +}; diff --git a/forward_engineering/helpers/udtHelper.js b/forward_engineering/helpers/udtHelper.js index fc90690..53aa248 100644 --- a/forward_engineering/helpers/udtHelper.js +++ b/forward_engineering/helpers/udtHelper.js @@ -7,7 +7,7 @@ module.exports = ({ wrapComment, }) => { const getPlainUdt = (udt, columns) => { - const udtName = getNamePrefixedWithSchemaName(udt.name, udt.databaseName); + const udtName = getNamePrefixedWithSchemaName(udt.name, udt.schemaName); const comment = assignTemplates(templates.comment, { object: 'TYPE', objectName: udtName, diff --git a/package.json b/package.json index 6fc791c..715f0ea 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "versionDate": "2021-09-01", "author": "hackolade", "engines": { - "hackolade": "5.2.6", + "hackolade": "5.2.7", "hackoladePlugin": "1.2.0" }, "contributes": { diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 01ea571..71fcd2e 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ 
b/properties_pane/field_level/fieldLevelConfig.json @@ -307,11 +307,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -430,11 +431,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -657,11 +659,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -780,11 +783,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
}, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -975,11 +979,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -1098,11 +1103,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -1346,11 +1352,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -1469,11 +1476,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
}, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -1706,11 +1714,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -1829,11 +1838,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -2028,11 +2038,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." }, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -2151,11 +2162,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
}, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -2418,7 +2430,8 @@ making sure that you maintain a proper JSON format. "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -2541,7 +2554,8 @@ making sure that you maintain a proper JSON format. "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -2720,7 +2734,8 @@ making sure that you maintain a proper JSON format. "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -2843,7 +2858,8 @@ making sure that you maintain a proper JSON format. "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -3043,7 +3059,8 @@ making sure that you maintain a proper JSON format. "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", @@ -3162,11 +3179,12 @@ making sure that you maintain a proper JSON format. "propertyTooltip": "The optional INCLUDE clause specifies a list of columns which will be included in the constraint as non-key columns." 
}, { - "propertyName": "With options", + "propertyName": "With storage parameters", "propertyKeyword": "indexStorageParameters", "propertyType": "details", "template": "textarea", - "valueType": "string" + "valueType": "string", + "markdown": false }, { "propertyName": "Tablespace", From a8e025a540bb21ffc1e609d810ddc91ae152a414 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 22 Oct 2021 14:16:08 +0300 Subject: [PATCH 66/69] RE: fixed not handled error --- reverse_engineering/api.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 887073f..3f6804f 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -52,7 +52,7 @@ module.exports = { return cb(null, dbs); } catch (err) { logger.log('error', err); - return cb(mapError(err)); + return cb(prepareError(err)); } }, From ec300da1a4b72aef136dbc9b3b4f9a48f87b2d39 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Fri, 22 Oct 2021 14:34:59 +0300 Subject: [PATCH 67/69] Apply to instance: removed createDb script before applying process --- forward_engineering/applyToInstanceHelper.js | 8 +++++++- forward_engineering/helpers/constraintsHelper.js | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/forward_engineering/applyToInstanceHelper.js b/forward_engineering/applyToInstanceHelper.js index be9feb6..29723db 100644 --- a/forward_engineering/applyToInstanceHelper.js +++ b/forward_engineering/applyToInstanceHelper.js @@ -5,13 +5,19 @@ const applyToInstance = async (connectionInfo, logger, app) => { postgresService.setDependencies(app); await postgresService.connect(connectionInfo, logger); await postgresService.logVersion(); - await postgresService.applyScript(connectionInfo.script); + await postgresService.applyScript(removeCreateDbScript(connectionInfo.script)); } catch (error) { logger.error(error); throw prepareError(error); } }; +const removeCreateDbScript = script => { + const createDbScriptRegexp 
= /CREATE DATABASE[^;]*;/gi; + + return script.replace(createDbScriptRegexp, ''); +}; + const prepareError = error => { error = JSON.stringify(error, Object.getOwnPropertyNames(error)); error = JSON.parse(error); diff --git a/forward_engineering/helpers/constraintsHelper.js b/forward_engineering/helpers/constraintsHelper.js index 26df675..923e7a3 100644 --- a/forward_engineering/helpers/constraintsHelper.js +++ b/forward_engineering/helpers/constraintsHelper.js @@ -50,7 +50,7 @@ module.exports = ({ const constraintName = wrapInQuotes(_.trim(keyData.name)); const isAllColumnsDeactivated = checkAllKeysDeactivated(keyData.columns); const columns = getColumnsList(keyData.columns, isAllColumnsDeactivated, isParentActivated); - const includeNonKey = keyData.include + const includeNonKey = keyData.include.length ? ` INCLUDE${getColumnsList(keyData.include, isAllColumnsDeactivated, isParentActivated)}` : ''; const storageParameters = keyData.storageParameters ? ` WITH (${keyData.storageParameters})` : ''; From 1cfb2096e098609f03651bbf3fa1797861d64966 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Mon, 25 Oct 2021 12:32:23 +0300 Subject: [PATCH 68/69] RE: fixed long test connection --- reverse_engineering/api.js | 4 +++- reverse_engineering/helpers/postgresService.js | 2 -- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 3f6804f..f78dab2 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -1,7 +1,6 @@ 'use strict'; const { createLogger } = require('./helpers/loggerHelper'); -const { connect } = require('./helpers/postgresService'); const postgresService = require('./helpers/postgresService'); module.exports = { @@ -24,6 +23,7 @@ module.exports = { await postgresService.connect(connectionInfo, postgresLogger); await postgresService.pingDb(); + await postgresService.logVersion(); callback(); } catch (error) { logger.log('error', prepareError(error), 'Test connection 
instance log'); @@ -46,6 +46,7 @@ module.exports = { postgresService.setDependencies(app); await postgresService.connect(connectionInfo, postgresLogger); + await postgresService.logVersion(); const dbs = await postgresService.getDatabaseNames(); logger.log('info', dbs, 'All databases list', connectionInfo.hiddenKeys); @@ -73,6 +74,7 @@ module.exports = { postgresService.setDependencies(app); await postgresService.connect(connectionInfo, postgresLogger); + await postgresService.logVersion(); const schemasNames = await postgresService.getAllSchemasNames(); const collections = await schemasNames.reduce(async (next, dbName) => { diff --git a/reverse_engineering/helpers/postgresService.js b/reverse_engineering/helpers/postgresService.js index 6ee5f48..22190cb 100644 --- a/reverse_engineering/helpers/postgresService.js +++ b/reverse_engineering/helpers/postgresService.js @@ -71,8 +71,6 @@ module.exports = { db.initializePool(pool, specificLogger); currentSshTunnel = sshTunnel; logger = specificLogger; - - await this.logVersion(); }, async disconnect() { From 45abcf269f806561231a096f4cd8c033503beaed Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus Date: Wed, 27 Oct 2021 17:07:16 +0300 Subject: [PATCH 69/69] Fixed small issue in config --- properties_pane/entity_level/entityLevelConfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/properties_pane/entity_level/entityLevelConfig.json b/properties_pane/entity_level/entityLevelConfig.json index a586772..451ca73 100644 --- a/properties_pane/entity_level/entityLevelConfig.json +++ b/properties_pane/entity_level/entityLevelConfig.json @@ -1016,7 +1016,7 @@ making sure that you maintain a proper JSON format. 
}, { "propertyName": "Auto summarize", - "propertyKeyword": "autosummarize ", + "propertyKeyword": "autosummarize", "propertyType": "checkbox", "propertyTooltip": "Defines whether a summarization run is invoked for the previous page range whenever an insertion is detected on the next one.", "defaultValue": true